In [ ]:
import pandas as pd
from sklearn.preprocessing import LabelEncoder, StandardScaler
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import mean_squared_error, r2_score
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
from sklearn.svm import SVR
from sklearn.ensemble import GradientBoostingRegressor
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM, Dropout, Dense
from tensorflow.keras.utils import plot_model
from keras.callbacks import EarlyStopping
from sklearn.preprocessing import MinMaxScaler
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score
from xgboost import XGBRegressor
import numpy as np
import seaborn as sns
from keras.models import Sequential
from keras.layers import LSTM, Dense, Dropout
from keras.layers import GRU
from keras.layers import Bidirectional
from sklearn.svm import SVR
from sklearn.mixture import GaussianMixture
from sklearn.cluster import KMeans
In [ ]:
# Load the data
beverage_data = pd.read_excel('Beverage manufacturing data.xlsx')
# Display the first few rows and the columns to understand its structure
print(beverage_data.head())
print(beverage_data.columns)
Indicator Company Year Value Category \
0 - Accumulated depreciation BHN 2020 -6.0 ASSETS
1 - Accumulated depreciation BHN 2021 -7.0 ASSETS
2 - Accumulated depreciation BHN 2022 -8.0 ASSETS
3 - Accumulated depreciation SAB 2020 -27.0 ASSETS
4 - Accumulated depreciation SAB 2021 -26.0 ASSETS
Category 2
0 B. LONG-TERM ASSETS
1 B. LONG-TERM ASSETS
2 B. LONG-TERM ASSETS
3 B. LONG-TERM ASSETS
4 B. LONG-TERM ASSETS
Index(['Indicator', 'Company', 'Year', 'Value', 'Category', 'Category 2'], dtype='object')
In [ ]:
# Display the unique values in the 'Indicator' column to understand its content
unique_indicators = beverage_data['Indicator'].unique()
print(unique_indicators)
['\xa0\xa0\xa0\xa0\xa0- Accumulated depreciation' '\xa0\xa0\xa0\xa0\xa0- Accumulated retained earning at the end of the previous period' '\xa0\xa0\xa0\xa0\xa0- Common stock with voting right' '\xa0\xa0\xa0\xa0\xa0- Cost' '\xa0\xa0\xa0\xa0\xa0- Undistributed earnings in this period' '\xa0\xa0\xa0\xa0\xa0\xa0- Accumulated depreciation' '\xa0\xa0\xa0\xa0\xa0\xa0- Cost' "\xa0\xa0\xa0\xa0\xa01. Owner's capital" '\xa0\xa0\xa0\xa0\xa01. Tangible fixed assets' '\xa0\xa0\xa0\xa0\xa011. Undistributed earnings after tax' '\xa0\xa0\xa0\xa0\xa02. Financial leased fixed assets' '\xa0\xa0\xa0\xa0\xa03. Intangible fixed assets' '\xa0\xa0\xa0\xa01. Cash' '\xa0\xa0\xa0\xa01. Inventories' '\xa0\xa0\xa0\xa01. Long-term prepayments' '\xa0\xa0\xa0\xa01. Long-term trade payables' '\xa0\xa0\xa0\xa01. Long-term trade receivables' '\xa0\xa0\xa0\xa01. Short-term prepayments' '\xa0\xa0\xa0\xa01. Short-term trade accounts payable' '\xa0\xa0\xa0\xa01. Short-term trade accounts receivable' '\xa0\xa0\xa0\xa01. Subsidized not-for-profit funds' "\xa0\xa0\xa0\xa010. Other funds from owner's equity" '\xa0\xa0\xa0\xa010. Short-term borrowings and financial leases' '\xa0\xa0\xa0\xa011. Deferred income tax liabilities' '\xa0\xa0\xa0\xa011. Provision for short-term liabilities' '\xa0\xa0\xa0\xa012. Provision for long-term liabilities' '\xa0\xa0\xa0\xa012.. Bonus and welfare fund' '\xa0\xa0\xa0\xa013. Fund for technology development' "\xa0\xa0\xa0\xa013. Minority's interest" '\xa0\xa0\xa0\xa02. Cash equivalents' '\xa0\xa0\xa0\xa02. Construction in progress' '\xa0\xa0\xa0\xa02. Deferred income tax assets' '\xa0\xa0\xa0\xa02. Funds invested in fixed assets' '\xa0\xa0\xa0\xa02. Investments in associates, joint-ventures' '\xa0\xa0\xa0\xa02. Long-term prepayments to suppliers' '\xa0\xa0\xa0\xa02. Provision for decline in value of inventories' '\xa0\xa0\xa0\xa02. Share premium' '\xa0\xa0\xa0\xa02. Short-term advances from customers' '\xa0\xa0\xa0\xa02. Short-term prepayments to suppliers' '\xa0\xa0\xa0\xa02. Value added tax to be reclaimed' '\xa0\xa0\xa0\xa03. Held to maturity investments' '\xa0\xa0\xa0\xa03. Investments in other entities' '\xa0\xa0\xa0\xa03. Long-term equipment, supplies, spare parts' '\xa0\xa0\xa0\xa03. Taxes and other payables to state authorities' '\xa0\xa0\xa0\xa03. Taxes and other receivables from state authorities' '\xa0\xa0\xa0\xa04. Inter-company payables on business capital' '\xa0\xa0\xa0\xa04. Other capital of owners' '\xa0\xa0\xa0\xa04. Payable to employees' '\xa0\xa0\xa0\xa04. Provision for diminution in value of long-term investments' '\xa0\xa0\xa0\xa05. Held to maturity investments' '\xa0\xa0\xa0\xa05. Long-term loan receivables' '\xa0\xa0\xa0\xa05. Short-term acrrued expenses' '\xa0\xa0\xa0\xa05. Short-term loan receivables' '\xa0\xa0\xa0\xa05. Treasury shares' '\xa0\xa0\xa0\xa06. Other long-term receivables' '\xa0\xa0\xa0\xa06. Other short-term receivables' '\xa0\xa0\xa0\xa07. Foreign exchange differences' '\xa0\xa0\xa0\xa07. Other long-term liabilities' '\xa0\xa0\xa0\xa07. Provision for long-term doubtful debts' '\xa0\xa0\xa0\xa07. Provision for short-term doubtful debts (*)' '\xa0\xa0\xa0\xa08. Assets awaiting resolution' '\xa0\xa0\xa0\xa08. Investment and development fund' '\xa0\xa0\xa0\xa08. Long-term borrowings and financial leases' '\xa0\xa0\xa0\xa08. Short-term unearned revenue' '\xa0\xa0\xa0\xa09. Other short-term payables' '\xa0\xa0\xa0\xa0I. Cash and cash equivalents' '\xa0\xa0\xa0\xa0I. Long-term receivables' "\xa0\xa0\xa0\xa0I. Owner's equity" '\xa0\xa0\xa0\xa0I. Short -term liabilities' '\xa0\xa0\xa0\xa0II. Fixed assets' '\xa0\xa0\xa0\xa0II. Long-term liabilities' '\xa0\xa0\xa0\xa0II. Other resources and funds' '\xa0\xa0\xa0\xa0II. Short-term financial investments' '\xa0\xa0\xa0\xa0III. Investment properties' '\xa0\xa0\xa0\xa0III. Short-term receivables' '\xa0\xa0\xa0\xa0IV. Inventories' '\xa0\xa0\xa0\xa0IV. Long-term assets in progress' '\xa0\xa0\xa0\xa0V. Long-term financial investments' '\xa0\xa0\xa0\xa0V. Other short-term assets' '\xa0\xa0\xa0\xa0VI. Other long-term assets' '\xa0\xa0\xa0A. LIABILITIES' '\xa0\xa0\xa0A. SHORT-TERM ASSETS' '\xa0\xa0\xa0B. LONG-TERM ASSETS' "\xa0\xa0\xa0B. OWNER'S EQUITY" '\xa0\xa0\xa0VII. Goodwill' '\xa0TOTAL ASSETS' "\xa0TOTAL OWNER'S EQUITY AND LIABILITIES" '(Increase)/decrease in inventories' '(Increase)/decrease in prepaid expenses' '(Increase)/decrease in receivables' '(Reversal of provisions)/provisions' '1. Payment for fixed assets, constructions and other long-term assets' '1. Profit before tax' '1. Revenue' '10. General and administrative expenses' '11. Operating profit' '12. Other income' '13.Other expenses' '14. Other profit' '15. Profit before tax' '16. Current corporate income tax expenses' '17. Deferred income tax expenses (*)' '18. Net profit after tax' '19. Earnings per share' '2. Deduction from revenue' '2. Receipts from disposal of fixed assets and other long-term assets' '20. Diluted earnings per share' "3. Loans, purchases of other entities' debt instruments" '3. Net revenue' '3. Operating profit before changes in working capital' '3. Proceeds from borrowings' '4. Cost of goods sold' '4. Principal repayments' "4. Receipts from loan repayments, sale of other entities' debt instruments" '5. Gross profit' '5. Repayment of financial leases' '6. Collections on investment in other entities' '6. Dividends paid, profits distributed to owners' '6. Financial income' '7. Dividends, interest and profit received' '7. Financial expenses' "8. Share of associates and joint ventures' result" '9. Selling expenses' 'Accrual ratio (Balance sheet method)' 'Accrual ratio (Cash flow method)' 'Accrual ratio CF' 'Adjustments for' 'Beta' 'Book value per share (BVPS)' 'Cash and cash equivalents at beginning of the period' 'Cash and cash equivalents at end of the period' 'Cash flow per share (CPS)' 'Cash ratio' 'Cash return on equity' 'Cash return to assets' 'Cash to income' 'Cash/Short-term assets' 'Construction in progress/Fixed assets' 'Corporate income tax paid' 'Cost of goods sold/Net revenue' 'Days of inventory on hand' 'Days of sales outstanding' 'Debt coverage' 'Debt to assets' 'Debt to equity' 'Depreciation of fixed assets and properties investment' 'Dividend yield' 'EBIT margin' 'EBITDA/Net revenue' 'Equity to assets' 'Equity turnover' 'EV/EBIT' 'EV/EBITDA' 'Exchange difference due to re-valuation of ending balances' 'Finance lease/Fixed assets' 'Fixed asset turnover' 'Fixed assets/Total assets' 'Foreign exchange (gain)/loss from revaluation of monetary items denominated in foreign currencies' 'General and Administrative expenses/Net revenue' 'Gross profit' 'Gross profit margin' 'Increase/(decrease) in payables (other than interest, corporate income tax)' 'Intangible fixed assets/Fixed assets' 'Interest coverage' 'Interest expense' 'Interest expenses/Net revenue' 'Interest paid' 'Inventory turnover' 'Inventory/Short-term assets' 'Liabilities' 'Liabilities to assets' 'Liabilities to equity' 'Long-term assets/Total assets' 'Long-term liabilities' 'Loss/(profit) from investment activities' "Minority's interest" 'Net cash flows during the period' 'Net cash flows from financing activities' 'Net cash flows from investing activities' 'Net cash flows from operating activities' 'Net cash flows/Short -term liabilities' 'Net profit margin' 'Net revenue' 'Number of days of payables' 'Of which: Interest expenses' 'Other payments for operating activities' 'Other receipts from operating activities' 'Other Short-term assets/Short-term assets' "Owner's equity" 'P/B' 'P/E' 'P/S' 'Payables turnover' 'Profit after tax for shareholders of parent company' 'Profit after tax for shareholders of the parent company' 'Profit before tax' 'Quick ratio' 'Quick ratio (except: Inventories, Short-term receivables - reference)' 'Ratios - BHN' 'Ratios - HAD' 'Ratios - HAT' 'Ratios - SAB' 'Ratios - SCD' 'Ratios - SMB' 'Ratios - THB' 'Ratios - VCF' 'Ratios - VDL' 'Receivables turnover' 'Return on capital employed (ROCE)' 'ROA' 'ROE' 'Selling expenses/Net revenue' 'Short-term assets/Total assets' 'Short-term investments/Short-term assets' 'Short-term liabilities to equity' 'Short-term liabilities to total liabilities' 'Short-term ratio' 'Short-term receivables/Short-term assets' 'Tangible fixed assets/Fixed assets' 'Total asset turnover' 'Total assets' 'Trailing EPS']
In [ ]:
# Replace missing values with "Unknown"
beverage_data['Category'].fillna('Unknown', inplace=True)
beverage_data['Category 2'].fillna('Unknown', inplace=True)
# Ensure all values in 'Indicator' column are strings
beverage_data['Indicator'] = beverage_data['Indicator'].astype(str).str.strip()
# Verify the changes
beverage_data.isnull().sum()
/var/folders/57/3tgq502n7y31r_7lgrpyp6cc0000gn/T/ipykernel_1810/2716560748.py:2: FutureWarning: A value is trying to be set on a copy of a DataFrame or Series through chained assignment using an inplace method.
The behavior will change in pandas 3.0. This inplace method will never work because the intermediate object on which we are setting values always behaves as a copy.
For example, when doing 'df[col].method(value, inplace=True)', try using 'df.method({col: value}, inplace=True)' or df[col] = df[col].method(value) instead, to perform the operation inplace on the original object.
beverage_data['Category'].fillna('Unknown', inplace=True)
/var/folders/57/3tgq502n7y31r_7lgrpyp6cc0000gn/T/ipykernel_1810/2716560748.py:3: FutureWarning: A value is trying to be set on a copy of a DataFrame or Series through chained assignment using an inplace method.
The behavior will change in pandas 3.0. This inplace method will never work because the intermediate object on which we are setting values always behaves as a copy.
For example, when doing 'df[col].method(value, inplace=True)', try using 'df.method({col: value}, inplace=True)' or df[col] = df[col].method(value) instead, to perform the operation inplace on the original object.
beverage_data['Category 2'].fillna('Unknown', inplace=True)
Out[ ]:
Indicator 0 Company 0 Year 0 Value 0 Category 0 Category 2 0 dtype: int64
In [ ]:
# Identify non-numeric columns
non_numeric_columns = beverage_data.select_dtypes(include=['object']).columns
print("Non-numeric columns before encoding:", non_numeric_columns)
Non-numeric columns before encoding: Index(['Indicator', 'Company', 'Category', 'Category 2'], dtype='object')
In [ ]:
# Encode categorical columns
label_encoder = LabelEncoder()
for column in non_numeric_columns:
beverage_data[column] = label_encoder.fit_transform(beverage_data[column])
# Verify encoding
non_numeric_columns = beverage_data.select_dtypes(include=['object']).columns
print("Non-numeric columns after encoding:", non_numeric_columns)
Non-numeric columns after encoding: Index([], dtype='object')
In [ ]:
# Normalize the 'Value' column
scaler = StandardScaler()
beverage_data['Value'] = scaler.fit_transform(beverage_data[['Value']])
# Display the first few rows of the preprocessed data
beverage_data.head()
Out[ ]:
| Indicator | Company | Year | Value | Category | Category 2 | |
|---|---|---|---|---|---|---|
| 0 | 4 | 0 | 2020 | -0.180585 | 0 | 21 |
| 1 | 4 | 0 | 2021 | -0.180879 | 0 | 21 |
| 2 | 4 | 0 | 2022 | -0.181173 | 0 | 21 |
| 3 | 4 | 3 | 2020 | -0.186758 | 0 | 21 |
| 4 | 4 | 3 | 2021 | -0.186464 | 0 | 21 |
In [ ]:
# Summary statistics
desc_stats = beverage_data.describe()
print(desc_stats)
Indicator Company Year Value Category \
count 4558.000000 4558.000000 4558.000000 4.558000e+03 4558.000000
mean 110.351251 3.969724 2020.985739 -6.235566e-18 2.412023
std 64.982300 2.566039 0.818654 1.000110e+00 1.621816
min 0.000000 0.000000 2020.000000 -6.833770e+00 0.000000
25% 53.000000 2.000000 2020.000000 -1.785277e-01 1.000000
50% 115.000000 4.000000 2021.000000 -1.751401e-01 3.000000
75% 165.000000 6.000000 2022.000000 -1.541201e-01 4.000000
max 219.000000 8.000000 2022.000000 1.821011e+01 6.000000
Category 2
count 4558.000000
mean 11.661913
std 7.359719
min 0.000000
25% 5.000000
50% 11.000000
75% 20.000000
max 22.000000
In [ ]:
# Convert 'Value' column to numeric data type (ensuring any errors are coerced to NaNs)
beverage_data['Value'] = pd.to_numeric(beverage_data['Value'], errors='coerce')
# Drop rows with NaN values in 'Value' column if any exist after conversion
beverage_data = beverage_data.dropna(subset=['Value'])
In [ ]:
# Verify the unique values in the 'Indicator' column to find the exact string for Quick Ratio
unique_indicators = beverage_data['Indicator'].unique()
print("Unique indicators:", unique_indicators)
Unique indicators: [ 4 5 6 7 8 14 22 29 46 58 9 10 11 12 13 18 19 20 21 24 25 26 28 31 32 33 34 42 43 45 47 48 49 50 52 53 54 55 57 59 61 65 66 68 69 70 72 75 76 78 79 80 84 85 88 89 90 91 92 93 94 96 97 141 142 143 144 145 146 147 148 149 150 151 152 216 217 218 99 100 105 106 219 210 211 0 1 2 3 15 16 17 23 27 30 35 36 37 38 39 40 41 44 51 56 60 62 63 64 67 71 73 74 77 81 82 83 86 87 95 98 101 102 103 104 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 131 132 129 130 133 134 135 136 137 138 139 140 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 178 179 177 180 181 182 183 184 185 186 187 188 189 192 193 194 195 196 197 198 199 200 201 202 190 191 203 204 205 206 207 208 209 212 213 214 215]
In [ ]:
# Use the exact string value for 'Debt to Equity' found in the unique values
debt_to_equity_indicator = 'Debt to equity' # Replace this with the correct string if different
# Filter data for Debt to Equity Ratio
debt_to_equity = beverage_data[beverage_data['Indicator'] == debt_to_equity_indicator].copy()
print("Filtered Data:")
print(debt_to_equity)
# Create a new column 'Company Group' to categorize SABECO and other companies
debt_to_equity['Company Group'] = debt_to_equity['Company'].apply(lambda x: 'SABECO' if x == 'SAB' else 'Beverage Manufacturing')
# Aggregate the data by 'Company Group' and 'Year'
aggregated_data = debt_to_equity.groupby(['Company Group', 'Year'])['Value'].mean().reset_index()
print("Aggregated Data:")
print(aggregated_data)
# Pivot the DataFrame
debt_to_equity_pivot = aggregated_data.pivot(index='Company Group', columns='Year', values='Value')
print("Pivoted Data:")
print(debt_to_equity_pivot)
# Plotting the data
fig, ax = plt.subplots(figsize=(10, 6))
# Plot bars for each year
width = 0.2 # Adjusted bar width for better spacing
positions = list(range(len(debt_to_equity_pivot.index))) # Bar positions
# Create a list of positions for each year
year_positions = [[p + width*i for p in positions] for i in range(len(debt_to_equity_pivot.columns))]
for i, (year, year_position) in enumerate(zip(debt_to_equity_pivot.columns, year_positions)):
bars = ax.bar(year_position, debt_to_equity_pivot[year], width=width, label=f'{year}')
# Add numeric values on top of the bars
for bar in bars:
height = bar.get_height()
ax.text(bar.get_x() + bar.get_width()/2., height, f'{height:.2f}', ha='center', va='bottom')
# Set the labels and title
ax.set_xlabel('Company')
ax.set_ylabel('D/E Ratio')
ax.set_title('Debt to Equity Ratio')
ax.set_xticks([p + width for p in positions])
ax.set_xticklabels(debt_to_equity_pivot.index, rotation=0)
ax.legend()
# Save the figure
plt.savefig('debt_to_equity.png')
# Show the plot
plt.show()
Filtered Data: Empty DataFrame Columns: [Indicator, Company, Year, Value, Category, Category 2] Index: [] Aggregated Data: Empty DataFrame Columns: [Company Group, Year, Value] Index: [] Pivoted Data: Empty DataFrame Columns: [] Index: []
/var/folders/57/3tgq502n7y31r_7lgrpyp6cc0000gn/T/ipykernel_1810/245919589.py:45: UserWarning: No artists with labels found to put in legend. Note that artists whose label start with an underscore are ignored when legend() is called with no argument. ax.legend()
In [ ]:
# Use the exact string value for 'Quick Ratio' found in the unique values
quick_ratio_indicator = 'Quick ratio' # Replace this with the correct string if different
# Filter data for Quick Ratio
quick_ratio = beverage_data[beverage_data['Indicator'] == quick_ratio_indicator].copy()
print("Filtered Data:")
print(quick_ratio)
# Create a new column 'Company Group' to categorize SABECO and other companies
quick_ratio['Company Group'] = quick_ratio['Company'].apply(lambda x: 'SABECO' if x == 'SAB' else 'Beverage Manufacturing')
# Aggregate the data by 'Company Group' and 'Year'
aggregated_data = quick_ratio.groupby(['Company Group', 'Year'])['Value'].mean().reset_index()
print("Aggregated Data:")
print(aggregated_data)
# Pivot the DataFrame
quick_ratio_pivot = aggregated_data.pivot(index='Company Group', columns='Year', values='Value')
print("Pivoted Data:")
print(quick_ratio_pivot)
# Plotting the data
fig, ax = plt.subplots(figsize=(10, 6))
# Plot bars for each year
width = 0.2 # Adjusted bar width for better spacing
positions = list(range(len(quick_ratio_pivot.index))) # Bar positions
# Create a list of positions for each year
year_positions = [ [p + width*i for p in positions] for i in range(len(quick_ratio_pivot.columns)) ]
for i, (year, year_position) in enumerate(zip(quick_ratio_pivot.columns, year_positions)):
bars = ax.bar(year_position, quick_ratio_pivot[year], width=width, label=f'{year}')
# Add numeric values on top of the bars
for bar in bars:
height = bar.get_height()
ax.text(bar.get_x() + bar.get_width()/2., height,
f'{height:.2f}', ha='center', va='bottom')
# Set the labels and title
ax.set_xlabel('Company')
ax.set_ylabel('Quick Ratio')
ax.set_title('Quick Ratio')
ax.set_xticks([p + width for p in positions])
ax.set_xticklabels(quick_ratio_pivot.index, rotation=0)
ax.legend()
# Save the figure
plt.savefig('quick_ratio.png')
# Show the plot
plt.show()
Filtered Data:
Indicator Company Year Value Category Category 2
4077 Quick ratio BHN 2020 2.24 Ratios Liquidity ratios
4078 Quick ratio BHN 2021 1.80 Ratios Liquidity ratios
4079 Quick ratio BHN 2022 2.27 Ratios Liquidity ratios
4080 Quick ratio SAB 2020 3.49 Ratios Liquidity ratios
4081 Quick ratio SAB 2021 2.92 Ratios Liquidity ratios
4082 Quick ratio SAB 2022 2.68 Ratios Liquidity ratios
4083 Quick ratio SCD 2020 3.67 Ratios Liquidity ratios
4084 Quick ratio SCD 2021 1.99 Ratios Liquidity ratios
4085 Quick ratio SCD 2022 0.39 Ratios Liquidity ratios
4086 Quick ratio SMB 2020 0.56 Ratios Liquidity ratios
4087 Quick ratio SMB 2021 0.85 Ratios Liquidity ratios
4088 Quick ratio SMB 2022 0.88 Ratios Liquidity ratios
4089 Quick ratio VCF 2020 2.20 Ratios Liquidity ratios
4090 Quick ratio VCF 2021 1.87 Ratios Liquidity ratios
4091 Quick ratio VCF 2022 3.19 Ratios Liquidity ratios
4092 Quick ratio HAD 2020 2.76 Ratios Liquidity ratios
4093 Quick ratio HAD 2021 2.28 Ratios Liquidity ratios
4094 Quick ratio HAD 2022 2.80 Ratios Liquidity ratios
4095 Quick ratio THB 2020 1.09 Ratios Liquidity ratios
4096 Quick ratio THB 2021 1.33 Ratios Liquidity ratios
4097 Quick ratio THB 2022 1.25 Ratios Liquidity ratios
4098 Quick ratio VDL 2020 4.59 Ratios Liquidity ratios
4099 Quick ratio VDL 2021 1.76 Ratios Liquidity ratios
4100 Quick ratio VDL 2022 1.78 Ratios Liquidity ratios
4101 Quick ratio HAT 2020 0.89 Ratios Liquidity ratios
4102 Quick ratio HAT 2021 1.00 Ratios Liquidity ratios
4103 Quick ratio HAT 2022 1.25 Ratios Liquidity ratios
Aggregated Data:
Company Group Year Value
0 Beverage Manufacturing 2020 2.25000
1 Beverage Manufacturing 2021 1.61000
2 Beverage Manufacturing 2022 1.72625
3 SABECO 2020 3.49000
4 SABECO 2021 2.92000
5 SABECO 2022 2.68000
Pivoted Data:
Year 2020 2021 2022
Company Group
Beverage Manufacturing 2.25 1.61 1.72625
SABECO 3.49 2.92 2.68000
In [ ]:
# Inspect unique values in 'Indicator' column to find the correct string for 'Interest Coverage'
unique_indicators = beverage_data['Indicator'].unique()
print("Unique indicators:", unique_indicators)
# Use the exact string value for 'Interest Coverage' found in the unique values
interest_coverage_indicator = 'Interest coverage' # Replace this with the correct string if different
# Filter data for Interest Coverage
interest_coverage = beverage_data[beverage_data['Indicator'] == interest_coverage_indicator].copy()
print("Filtered Data:")
print(interest_coverage)
# Create a new column 'Company Group' to categorize SABECO and other companies
interest_coverage['Company Group'] = interest_coverage['Company'].apply(lambda x: 'SABECO' if x == 'SAB' else 'Beverage Manufacturing')
# Aggregate the data by 'Company Group' and 'Year'
aggregated_data = interest_coverage.groupby(['Company Group', 'Year'])['Value'].mean().reset_index()
print("Aggregated Data:")
print(aggregated_data)
# Pivot the DataFrame
interest_coverage_pivot = aggregated_data.pivot(index='Company Group', columns='Year', values='Value')
print("Pivoted Data:")
print(interest_coverage_pivot)
# Plotting the data
fig, ax = plt.subplots(figsize=(10, 6))
# Plot bars for each year
width = 0.2 # Adjusted bar width for better spacing
positions = list(range(len(interest_coverage_pivot.index))) # Bar positions
# Create a list of positions for each year
year_positions = [ [p + width*i for p in positions] for i in range(len(interest_coverage_pivot.columns)) ]
for i, (year, year_position) in enumerate(zip(interest_coverage_pivot.columns, year_positions)):
bars = ax.bar(year_position, interest_coverage_pivot[year], width=width, label=f'{year}')
# Add numeric values on top of the bars
for bar in bars:
height = bar.get_height()
ax.text(bar.get_x() + bar.get_width()/2., height,
f'{height:.2f}', ha='center', va='bottom')
# Set the labels and title
ax.set_xlabel('Company')
ax.set_ylabel('Interest Coverage')
ax.set_title('Interest Coverage')
ax.set_xticks([p + width for p in positions])
ax.set_xticklabels(interest_coverage_pivot.index, rotation=0)
ax.legend()
# Save the figure
plt.savefig('interest_coverage.png')
# Show the plot
plt.show()
Unique indicators: ['- Accumulated depreciation'
'- Accumulated retained earning at the end of the previous period'
'- Common stock with voting right' '- Cost'
'- Undistributed earnings in this period' "1. Owner's capital"
'1. Tangible fixed assets' '11. Undistributed earnings after tax'
'2. Financial leased fixed assets' '3. Intangible fixed assets' '1. Cash'
'1. Inventories' '1. Long-term prepayments' '1. Long-term trade payables'
'1. Long-term trade receivables' '1. Short-term prepayments'
'1. Short-term trade accounts payable'
'1. Short-term trade accounts receivable'
'1. Subsidized not-for-profit funds'
"10. Other funds from owner's equity"
'10. Short-term borrowings and financial leases'
'11. Deferred income tax liabilities'
'11. Provision for short-term liabilities'
'12. Provision for long-term liabilities' '12.. Bonus and welfare fund'
'13. Fund for technology development' "13. Minority's interest"
'2. Cash equivalents' '2. Construction in progress'
'2. Deferred income tax assets' '2. Funds invested in fixed assets'
'2. Investments in associates, joint-ventures'
'2. Long-term prepayments to suppliers'
'2. Provision for decline in value of inventories' '2. Share premium'
'2. Short-term advances from customers'
'2. Short-term prepayments to suppliers'
'2. Value added tax to be reclaimed' '3. Held to maturity investments'
'3. Investments in other entities'
'3. Long-term equipment, supplies, spare parts'
'3. Taxes and other payables to state authorities'
'3. Taxes and other receivables from state authorities'
'4. Inter-company payables on business capital'
'4. Other capital of owners' '4. Payable to employees'
'4. Provision for diminution in value of long-term investments'
'5. Held to maturity investments' '5. Long-term loan receivables'
'5. Short-term acrrued expenses' '5. Short-term loan receivables'
'5. Treasury shares' '6. Other long-term receivables'
'6. Other short-term receivables' '7. Foreign exchange differences'
'7. Other long-term liabilities'
'7. Provision for long-term doubtful debts'
'7. Provision for short-term doubtful debts (*)'
'8. Assets awaiting resolution' '8. Investment and development fund'
'8. Long-term borrowings and financial leases'
'8. Short-term unearned revenue' '9. Other short-term payables'
'I. Cash and cash equivalents' 'I. Long-term receivables'
"I. Owner's equity" 'I. Short -term liabilities' 'II. Fixed assets'
'II. Long-term liabilities' 'II. Other resources and funds'
'II. Short-term financial investments' 'III. Investment properties'
'III. Short-term receivables' 'IV. Inventories'
'IV. Long-term assets in progress' 'V. Long-term financial investments'
'V. Other short-term assets' 'VI. Other long-term assets'
'A. LIABILITIES' 'A. SHORT-TERM ASSETS' 'B. LONG-TERM ASSETS'
"B. OWNER'S EQUITY" 'VII. Goodwill' 'TOTAL ASSETS'
"TOTAL OWNER'S EQUITY AND LIABILITIES"
'(Increase)/decrease in inventories'
'(Increase)/decrease in prepaid expenses'
'(Increase)/decrease in receivables'
'(Reversal of provisions)/provisions'
'1. Payment for fixed assets, constructions and other long-term assets'
'1. Profit before tax' '1. Revenue'
'10. General and administrative expenses' '11. Operating profit'
'12. Other income' '13.Other expenses' '14. Other profit'
'15. Profit before tax' '16. Current corporate income tax expenses'
'17. Deferred income tax expenses (*)' '18. Net profit after tax'
'19. Earnings per share' '2. Deduction from revenue'
'2. Receipts from disposal of fixed assets and other long-term assets'
'20. Diluted earnings per share'
"3. Loans, purchases of other entities' debt instruments"
'3. Net revenue' '3. Operating profit before changes in working capital'
'3. Proceeds from borrowings' '4. Cost of goods sold'
'4. Principal repayments'
"4. Receipts from loan repayments, sale of other entities' debt instruments"
'5. Gross profit' '5. Repayment of financial leases'
'6. Collections on investment in other entities'
'6. Dividends paid, profits distributed to owners' '6. Financial income'
'7. Dividends, interest and profit received' '7. Financial expenses'
"8. Share of associates and joint ventures' result" '9. Selling expenses'
'Accrual ratio (Balance sheet method)' 'Accrual ratio (Cash flow method)'
'Accrual ratio CF' 'Adjustments for' 'Beta' 'Book value per share (BVPS)'
'Cash and cash equivalents at beginning of the period'
'Cash and cash equivalents at end of the period'
'Cash flow per share (CPS)' 'Cash ratio' 'Cash return on equity'
'Cash return to assets' 'Cash to income' 'Cash/Short-term assets'
'Construction in progress/Fixed assets' 'Corporate income tax paid'
'Cost of goods sold/Net revenue' 'Days of inventory on hand'
'Days of sales outstanding' 'Debt coverage' 'Debt to assets'
'Debt to equity' 'Depreciation of fixed assets and properties investment'
'Dividend yield' 'EBIT margin' 'EBITDA/Net revenue' 'Equity to assets'
'Equity turnover' 'EV/EBIT' 'EV/EBITDA'
'Exchange difference due to re-valuation of ending balances'
'Finance lease/Fixed assets' 'Fixed asset turnover'
'Fixed assets/Total assets'
'Foreign exchange (gain)/loss from revaluation of monetary items denominated in foreign currencies'
'General and Administrative expenses/Net revenue' 'Gross profit'
'Gross profit margin'
'Increase/(decrease) in payables (other than interest, corporate income tax)'
'Intangible fixed assets/Fixed assets' 'Interest coverage'
'Interest expense' 'Interest expenses/Net revenue' 'Interest paid'
'Inventory turnover' 'Inventory/Short-term assets' 'Liabilities'
'Liabilities to assets' 'Liabilities to equity'
'Long-term assets/Total assets' 'Long-term liabilities'
'Loss/(profit) from investment activities' "Minority's interest"
'Net cash flows during the period'
'Net cash flows from financing activities'
'Net cash flows from investing activities'
'Net cash flows from operating activities'
'Net cash flows/Short -term liabilities' 'Net profit margin'
'Net revenue' 'Number of days of payables' 'Of which: Interest expenses'
'Other payments for operating activities'
'Other receipts from operating activities'
'Other Short-term assets/Short-term assets' "Owner's equity" 'P/B' 'P/E'
'P/S' 'Payables turnover'
'Profit after tax for shareholders of parent company'
'Profit after tax for shareholders of the parent company'
'Profit before tax' 'Quick ratio'
'Quick ratio (except: Inventories, Short-term receivables - reference)'
'Ratios - BHN' 'Ratios - HAD' 'Ratios - HAT' 'Ratios - SAB'
'Ratios - SCD' 'Ratios - SMB' 'Ratios - THB' 'Ratios - VCF'
'Ratios - VDL' 'Receivables turnover' 'Return on capital employed (ROCE)'
'ROA' 'ROE' 'Selling expenses/Net revenue'
'Short-term assets/Total assets'
'Short-term investments/Short-term assets'
'Short-term liabilities to equity'
'Short-term liabilities to total liabilities' 'Short-term ratio'
'Short-term receivables/Short-term assets'
'Tangible fixed assets/Fixed assets' 'Total asset turnover'
'Total assets' 'Trailing EPS']
Filtered Data:
Indicator Company Year Value Category Category 2
3284 Interest coverage BHN 2020 34.76 Ratios Liquidity ratios
3285 Interest coverage BHN 2021 32.47 Ratios Liquidity ratios
3286 Interest coverage BHN 2022 73.35 Ratios Liquidity ratios
3287 Interest coverage SAB 2020 96.98 Ratios Liquidity ratios
3288 Interest coverage SAB 2021 100.65 Ratios Liquidity ratios
3289 Interest coverage SAB 2022 150.69 Ratios Liquidity ratios
3290 Interest coverage SCD 2020 2.49 Ratios Liquidity ratios
3291 Interest coverage SCD 2021 -1.49 Ratios Liquidity ratios
3292 Interest coverage SCD 2022 -2.13 Ratios Liquidity ratios
3293 Interest coverage SMB 2020 59.91 Ratios Liquidity ratios
3294 Interest coverage SMB 2021 67.88 Ratios Liquidity ratios
3295 Interest coverage SMB 2022 53.07 Ratios Liquidity ratios
3296 Interest coverage VCF 2020 70.70 Ratios Liquidity ratios
3297 Interest coverage VCF 2021 109.30 Ratios Liquidity ratios
3298 Interest coverage VCF 2022 61.55 Ratios Liquidity ratios
3299 Interest coverage HAD 2020 0.00 Ratios Liquidity ratios
3300 Interest coverage HAD 2021 0.00 Ratios Liquidity ratios
3301 Interest coverage HAD 2022 0.00 Ratios Liquidity ratios
3302 Interest coverage THB 2020 0.00 Ratios Liquidity ratios
3303 Interest coverage THB 2021 837.21 Ratios Liquidity ratios
3304 Interest coverage THB 2022 393.58 Ratios Liquidity ratios
3305 Interest coverage VDL 2020 24.84 Ratios Liquidity ratios
3306 Interest coverage VDL 2021 12.08 Ratios Liquidity ratios
3307 Interest coverage VDL 2022 -20.08 Ratios Liquidity ratios
3308 Interest coverage HAT 2020 0.00 Ratios Liquidity ratios
3309 Interest coverage HAT 2021 0.00 Ratios Liquidity ratios
3310 Interest coverage HAT 2022 0.00 Ratios Liquidity ratios
Aggregated Data:
Company Group Year Value
0 Beverage Manufacturing 2020 24.08750
1 Beverage Manufacturing 2021 132.18125
2 Beverage Manufacturing 2022 69.91750
3 SABECO 2020 96.98000
4 SABECO 2021 100.65000
5 SABECO 2022 150.69000
Pivoted Data:
Year 2020 2021 2022
Company Group
Beverage Manufacturing 24.0875 132.18125 69.9175
SABECO 96.9800 100.65000 150.6900
In [ ]:
# Use the exact string value for 'Number of Days of Payables' found in the unique values
days_payables_indicator = 'Number of days of payables' # Replace this with the correct string if different
# Filter data for Number of Days of Payables
days_payables = beverage_data[beverage_data['Indicator'] == days_payables_indicator].copy()
print("Filtered Data:")
print(days_payables)
# Create a new column 'Company Group' to categorize SABECO and other companies
days_payables['Company Group'] = days_payables['Company'].apply(lambda x: 'SABECO' if x == 'SAB' else 'Beverage Manufacturing')
# Aggregate the data by 'Company Group' and 'Year'
aggregated_data = days_payables.groupby(['Company Group', 'Year'])['Value'].mean().reset_index()
print("Aggregated Data:")
print(aggregated_data)
# Pivot the DataFrame
days_payables_pivot = aggregated_data.pivot(index='Company Group', columns='Year', values='Value')
print("Pivoted Data:")
print(days_payables_pivot)
# Plotting the data
fig, ax = plt.subplots(figsize=(10, 6))
# Plot bars for each year
width = 0.2 # Adjusted bar width for better spacing
positions = list(range(len(days_payables_pivot.index))) # Bar positions
# Create a list of positions for each year
year_positions = [ [p + width*i for p in positions] for i in range(len(days_payables_pivot.columns)) ]
for i, (year, year_position) in enumerate(zip(days_payables_pivot.columns, year_positions)):
bars = ax.bar(year_position, days_payables_pivot[year], width=width, label=f'{year}')
# Add numeric values on top of the bars
for bar in bars:
height = bar.get_height()
ax.text(bar.get_x() + bar.get_width()/2., height,
f'{height:.2f}', ha='center', va='bottom')
# Set the labels and title
ax.set_xlabel('Company')
ax.set_ylabel('Days')
ax.set_title('Number of Days of Payables')
ax.set_xticks([p + width for p in positions])
ax.set_xticklabels(days_payables_pivot.index, rotation=0)
ax.legend()
# Save the figure
plt.savefig('days_payables.png')
# Show the plot
plt.show()
Filtered Data:
Indicator Company Year Value Category \
3767 Number of days of payables BHN 2020 45.12 Ratios
3768 Number of days of payables BHN 2021 32.87 Ratios
3769 Number of days of payables BHN 2022 28.97 Ratios
3770 Number of days of payables SAB 2020 38.26 Ratios
3771 Number of days of payables SAB 2021 39.42 Ratios
3772 Number of days of payables SAB 2022 38.95 Ratios
3773 Number of days of payables SCD 2020 72.41 Ratios
3774 Number of days of payables SCD 2021 86.88 Ratios
3775 Number of days of payables SCD 2022 157.82 Ratios
3776 Number of days of payables SMB 2020 8.57 Ratios
3777 Number of days of payables SMB 2021 7.62 Ratios
3778 Number of days of payables SMB 2022 8.96 Ratios
3779 Number of days of payables VCF 2020 41.54 Ratios
3780 Number of days of payables VCF 2021 44.40 Ratios
3781 Number of days of payables VCF 2022 38.73 Ratios
3782 Number of days of payables HAD 2020 2.86 Ratios
3783 Number of days of payables HAD 2021 3.28 Ratios
3784 Number of days of payables HAD 2022 2.15 Ratios
3785 Number of days of payables THB 2020 5.24 Ratios
3786 Number of days of payables THB 2021 11.62 Ratios
3787 Number of days of payables THB 2022 10.94 Ratios
3788 Number of days of payables VDL 2020 12.79 Ratios
3789 Number of days of payables VDL 2021 7.39 Ratios
3790 Number of days of payables VDL 2022 6.59 Ratios
3791 Number of days of payables HAT 2020 5.08 Ratios
3792 Number of days of payables HAT 2021 6.85 Ratios
3793 Number of days of payables HAT 2022 3.62 Ratios
Category 2
3767 Efficiency ratios
3768 Efficiency ratios
3769 Efficiency ratios
3770 Efficiency ratios
3771 Efficiency ratios
3772 Efficiency ratios
3773 Efficiency ratios
3774 Efficiency ratios
3775 Efficiency ratios
3776 Efficiency ratios
3777 Efficiency ratios
3778 Efficiency ratios
3779 Efficiency ratios
3780 Efficiency ratios
3781 Efficiency ratios
3782 Efficiency ratios
3783 Efficiency ratios
3784 Efficiency ratios
3785 Efficiency ratios
3786 Efficiency ratios
3787 Efficiency ratios
3788 Efficiency ratios
3789 Efficiency ratios
3790 Efficiency ratios
3791 Efficiency ratios
3792 Efficiency ratios
3793 Efficiency ratios
Aggregated Data:
Company Group Year Value
0 Beverage Manufacturing 2020 24.20125
1 Beverage Manufacturing 2021 25.11375
2 Beverage Manufacturing 2022 32.22250
3 SABECO 2020 38.26000
4 SABECO 2021 39.42000
5 SABECO 2022 38.95000
Pivoted Data:
Year 2020 2021 2022
Company Group
Beverage Manufacturing 24.20125 25.11375 32.2225
SABECO 38.26000 39.42000 38.9500
In [ ]:
# Use the exact string value for 'Days of Inventory on Hand' found in the unique values
days_inventory_indicator = 'Days of inventory on hand' # Replace this with the correct string if different
# Filter data for Days of Inventory on Hand
days_inventory = beverage_data[beverage_data['Indicator'] == days_inventory_indicator].copy()
print("Filtered Data:")
print(days_inventory)
# Create a new column 'Company Group' to categorize SABECO and other companies
days_inventory['Company Group'] = days_inventory['Company'].apply(lambda x: 'SABECO' if x == 'SAB' else 'Beverage Manufacturing')
# Aggregate the data by 'Company Group' and 'Year'
aggregated_data = days_inventory.groupby(['Company Group', 'Year'])['Value'].mean().reset_index()
print("Aggregated Data:")
print(aggregated_data)
# Pivot the DataFrame
days_inventory_pivot = aggregated_data.pivot(index='Company Group', columns='Year', values='Value')
print("Pivoted Data:")
print(days_inventory_pivot)
# Plotting the data
fig, ax = plt.subplots(figsize=(10, 6))
# Plot bars for each year
width = 0.2 # Adjusted bar width for better spacing
positions = list(range(len(days_inventory_pivot.index))) # Bar positions
# Create a list of positions for each year
year_positions = [ [p + width*i for p in positions] for i in range(len(days_inventory_pivot.columns)) ]
for i, (year, year_position) in enumerate(zip(days_inventory_pivot.columns, year_positions)):
bars = ax.bar(year_position, days_inventory_pivot[year], width=width, label=f'{year}')
# Add numeric values on top of the bars
for bar in bars:
height = bar.get_height()
ax.text(bar.get_x() + bar.get_width()/2., height,
f'{height:.2f}', ha='center', va='bottom')
# Set the labels and title
ax.set_xlabel('Company')
ax.set_ylabel('Days')
ax.set_title('Days of Inventory on Hand')
ax.set_xticks([p + width for p in positions])
ax.set_xticklabels(days_inventory_pivot.index, rotation=0)
ax.legend()
# Save the figure
plt.savefig('days_inventory.png')
# Show the plot
plt.show()
Filtered Data:
Indicator Company Year Value Category \
2766 Days of inventory on hand BHN 2020 41.21 Ratios
2767 Days of inventory on hand BHN 2021 39.94 Ratios
2768 Days of inventory on hand BHN 2022 38.30 Ratios
2769 Days of inventory on hand SAB 2020 32.02 Ratios
2770 Days of inventory on hand SAB 2021 30.29 Ratios
2771 Days of inventory on hand SAB 2022 29.11 Ratios
2772 Days of inventory on hand SCD 2020 78.23 Ratios
2773 Days of inventory on hand SCD 2021 97.78 Ratios
2774 Days of inventory on hand SCD 2022 63.14 Ratios
2775 Days of inventory on hand SMB 2020 74.92 Ratios
2776 Days of inventory on hand SMB 2021 75.50 Ratios
2777 Days of inventory on hand SMB 2022 79.83 Ratios
2778 Days of inventory on hand VCF 2020 40.47 Ratios
2779 Days of inventory on hand VCF 2021 51.62 Ratios
2780 Days of inventory on hand VCF 2022 64.56 Ratios
2781 Days of inventory on hand HAD 2020 63.38 Ratios
2782 Days of inventory on hand HAD 2021 71.70 Ratios
2783 Days of inventory on hand HAD 2022 68.37 Ratios
2784 Days of inventory on hand THB 2020 15.81 Ratios
2785 Days of inventory on hand THB 2021 15.28 Ratios
2786 Days of inventory on hand THB 2022 12.44 Ratios
2787 Days of inventory on hand VDL 2020 166.26 Ratios
2788 Days of inventory on hand VDL 2021 161.49 Ratios
2789 Days of inventory on hand VDL 2022 244.94 Ratios
2790 Days of inventory on hand HAT 2020 1.41 Ratios
2791 Days of inventory on hand HAT 2021 3.55 Ratios
2792 Days of inventory on hand HAT 2022 1.37 Ratios
Category 2
2766 Efficiency ratios
2767 Efficiency ratios
2768 Efficiency ratios
2769 Efficiency ratios
2770 Efficiency ratios
2771 Efficiency ratios
2772 Efficiency ratios
2773 Efficiency ratios
2774 Efficiency ratios
2775 Efficiency ratios
2776 Efficiency ratios
2777 Efficiency ratios
2778 Efficiency ratios
2779 Efficiency ratios
2780 Efficiency ratios
2781 Efficiency ratios
2782 Efficiency ratios
2783 Efficiency ratios
2784 Efficiency ratios
2785 Efficiency ratios
2786 Efficiency ratios
2787 Efficiency ratios
2788 Efficiency ratios
2789 Efficiency ratios
2790 Efficiency ratios
2791 Efficiency ratios
2792 Efficiency ratios
Aggregated Data:
Company Group Year Value
0 Beverage Manufacturing 2020 60.21125
1 Beverage Manufacturing 2021 64.60750
2 Beverage Manufacturing 2022 71.61875
3 SABECO 2020 32.02000
4 SABECO 2021 30.29000
5 SABECO 2022 29.11000
Pivoted Data:
Year 2020 2021 2022
Company Group
Beverage Manufacturing 60.21125 64.6075 71.61875
SABECO 32.02000 30.2900 29.11000
In [ ]:
# Filter data for Days of Sale Outstanding
days_sales_outstanding_indicator = 'Days of sales outstanding' # Ensure this matches the exact string in your dataset
# Filter data for Days of Sale Outstanding
days_sales_outstanding = beverage_data[beverage_data['Indicator'] == days_sales_outstanding_indicator].copy()
print("Filtered Data:")
print(days_sales_outstanding)
# Create a new column 'Company Group' to categorize SABECO and other companies
days_sales_outstanding['Company Group'] = days_sales_outstanding['Company'].apply(lambda x: 'SABECO' if x == 'SAB' else 'Beverage Manufacturing')
# Aggregate the data by 'Company Group' and 'Year'
aggregated_data = days_sales_outstanding.groupby(['Company Group', 'Year'])['Value'].mean().reset_index()
print("Aggregated Data:")
print(aggregated_data)
# Pivot the DataFrame
days_sales_outstanding_pivot = aggregated_data.pivot(index='Company Group', columns='Year', values='Value')
print("Pivoted Data:")
print(days_sales_outstanding_pivot)
# Plotting the data
fig, ax = plt.subplots(figsize=(10, 6))
# Plot bars for each year
width = 0.2 # Adjusted bar width for better spacing
positions = list(range(len(days_sales_outstanding_pivot.index))) # Bar positions
# Create a list of positions for each year
year_positions = [ [p + width*i for p in positions] for i in range(len(days_sales_outstanding_pivot.columns)) ]
for i, (year, year_position) in enumerate(zip(days_sales_outstanding_pivot.columns, year_positions)):
bars = ax.bar(year_position, days_sales_outstanding_pivot[year], width=width, label=f'{year}')
# Add numeric values on top of the bars
for bar in bars:
height = bar.get_height()
ax.text(bar.get_x() + bar.get_width()/2., height,
f'{height:.2f}', ha='center', va='bottom')
# Set the labels and title
ax.set_xlabel('Company')
ax.set_ylabel('Days')
ax.set_title('Days of Sale Outstanding')
ax.set_xticks([p + width for p in positions])
ax.set_xticklabels(days_sales_outstanding_pivot.index, rotation=0)
ax.legend()
# Save the figure
plt.savefig('days_sales_outstanding.png')
# Show the plot
plt.show()
Filtered Data:
Indicator Company Year Value Category \
2793 Days of sales outstanding BHN 2020 8.83 Ratios
2794 Days of sales outstanding BHN 2021 11.04 Ratios
2795 Days of sales outstanding BHN 2022 12.49 Ratios
2796 Days of sales outstanding SAB 2020 1.48 Ratios
2797 Days of sales outstanding SAB 2021 1.18 Ratios
2798 Days of sales outstanding SAB 2022 2.29 Ratios
2799 Days of sales outstanding SCD 2020 25.43 Ratios
2800 Days of sales outstanding SCD 2021 40.62 Ratios
2801 Days of sales outstanding SCD 2022 56.56 Ratios
2802 Days of sales outstanding SMB 2020 16.94 Ratios
2803 Days of sales outstanding SMB 2021 12.74 Ratios
2804 Days of sales outstanding SMB 2022 7.55 Ratios
2805 Days of sales outstanding VCF 2020 90.31 Ratios
2806 Days of sales outstanding VCF 2021 29.02 Ratios
2807 Days of sales outstanding VCF 2022 47.07 Ratios
2808 Days of sales outstanding HAD 2020 0.98 Ratios
2809 Days of sales outstanding HAD 2021 0.12 Ratios
2810 Days of sales outstanding THB 2020 10.45 Ratios
2811 Days of sales outstanding THB 2021 14.33 Ratios
2812 Days of sales outstanding THB 2022 17.70 Ratios
2813 Days of sales outstanding VDL 2020 23.73 Ratios
2814 Days of sales outstanding VDL 2021 34.37 Ratios
2815 Days of sales outstanding VDL 2022 35.32 Ratios
2816 Days of sales outstanding HAT 2020 0.16 Ratios
2817 Days of sales outstanding HAT 2021 1.49 Ratios
2818 Days of sales outstanding HAT 2022 0.83 Ratios
Category 2
2793 Efficiency ratios
2794 Efficiency ratios
2795 Efficiency ratios
2796 Efficiency ratios
2797 Efficiency ratios
2798 Efficiency ratios
2799 Efficiency ratios
2800 Efficiency ratios
2801 Efficiency ratios
2802 Efficiency ratios
2803 Efficiency ratios
2804 Efficiency ratios
2805 Efficiency ratios
2806 Efficiency ratios
2807 Efficiency ratios
2808 Efficiency ratios
2809 Efficiency ratios
2810 Efficiency ratios
2811 Efficiency ratios
2812 Efficiency ratios
2813 Efficiency ratios
2814 Efficiency ratios
2815 Efficiency ratios
2816 Efficiency ratios
2817 Efficiency ratios
2818 Efficiency ratios
Aggregated Data:
Company Group Year Value
0 Beverage Manufacturing 2020 22.10375
1 Beverage Manufacturing 2021 17.96625
2 Beverage Manufacturing 2022 25.36000
3 SABECO 2020 1.48000
4 SABECO 2021 1.18000
5 SABECO 2022 2.29000
Pivoted Data:
Year 2020 2021 2022
Company Group
Beverage Manufacturing 22.10375 17.96625 25.36
SABECO 1.48000 1.18000 2.29
In [ ]:
# Filter data for Return on Assets (ROA)
roa_indicator = 'ROA' # Ensure this matches the exact string in your dataset
# Filter data for ROA
roa = beverage_data[beverage_data['Indicator'] == roa_indicator].copy()
print("Filtered Data:")
print(roa)
# Create a new column 'Company Group' to categorize SABECO and other companies
roa['Company Group'] = roa['Company'].apply(lambda x: 'SABECO' if x == 'SAB' else 'Beverage Manufacturing')
# Aggregate the data by 'Company Group' and 'Year'
aggregated_data = roa.groupby(['Company Group', 'Year'])['Value'].mean().reset_index()
print("Aggregated Data:")
print(aggregated_data)
# Pivot the DataFrame
roa_pivot = aggregated_data.pivot(index='Company Group', columns='Year', values='Value')
print("Pivoted Data:")
print(roa_pivot)
# Plotting the data
fig, ax = plt.subplots(figsize=(10, 6))
# Plot bars for each year
width = 0.2 # Adjusted bar width for better spacing
positions = list(range(len(roa_pivot.index))) # Bar positions
# Create a list of positions for each year
year_positions = [ [p + width*i for p in positions] for i in range(len(roa_pivot.columns)) ]
for i, (year, year_position) in enumerate(zip(roa_pivot.columns, year_positions)):
bars = ax.bar(year_position, roa_pivot[year], width=width, label=f'{year}')
# Add numeric values on top of the bars
for bar in bars:
height = bar.get_height()
ax.text(bar.get_x() + bar.get_width()/2., height,
f'{height:.2f}', ha='center', va='bottom')
# Set the labels and title
ax.set_xlabel('Company')
ax.set_ylabel('ROA')
ax.set_title('Return on Assets (ROA)')
ax.set_xticks([p + width for p in positions])
ax.set_xticklabels(roa_pivot.index, rotation=0)
ax.legend()
# Save the figure
plt.savefig('roa.png')
# Show the plot
plt.show()
Filtered Data:
Indicator Company Year Value Category Category 2
4210 ROA BHN 2020 8.49 Ratios Profitability ratios
4211 ROA BHN 2021 4.12 Ratios Profitability ratios
4212 ROA BHN 2022 6.46 Ratios Profitability ratios
4213 ROA SAB 2020 17.39 Ratios Profitability ratios
4214 ROA SAB 2021 12.71 Ratios Profitability ratios
4215 ROA SAB 2022 16.09 Ratios Profitability ratios
4216 ROA SCD 2020 1.04 Ratios Profitability ratios
4217 ROA SCD 2021 -9.12 Ratios Profitability ratios
4218 ROA SCD 2022 -10.05 Ratios Profitability ratios
4219 ROA SMB 2020 19.83 Ratios Profitability ratios
4220 ROA SMB 2021 18.23 Ratios Profitability ratios
4221 ROA SMB 2022 19.29 Ratios Profitability ratios
4222 ROA VCF 2020 33.21 Ratios Profitability ratios
4223 ROA VCF 2021 21.03 Ratios Profitability ratios
4224 ROA VCF 2022 15.75 Ratios Profitability ratios
4225 ROA HAD 2020 8.50 Ratios Profitability ratios
4226 ROA HAD 2021 3.45 Ratios Profitability ratios
4227 ROA HAD 2022 11.43 Ratios Profitability ratios
4228 ROA THB 2020 1.03 Ratios Profitability ratios
4229 ROA THB 2021 1.84 Ratios Profitability ratios
4230 ROA THB 2022 3.21 Ratios Profitability ratios
4231 ROA VDL 2020 4.73 Ratios Profitability ratios
4232 ROA VDL 2021 1.43 Ratios Profitability ratios
4233 ROA VDL 2022 -7.31 Ratios Profitability ratios
4234 ROA HAT 2020 5.64 Ratios Profitability ratios
4235 ROA HAT 2021 0.74 Ratios Profitability ratios
4236 ROA HAT 2022 12.18 Ratios Profitability ratios
Aggregated Data:
Company Group Year Value
0 Beverage Manufacturing 2020 10.30875
1 Beverage Manufacturing 2021 5.21500
2 Beverage Manufacturing 2022 6.37000
3 SABECO 2020 17.39000
4 SABECO 2021 12.71000
5 SABECO 2022 16.09000
Pivoted Data:
Year 2020 2021 2022
Company Group
Beverage Manufacturing 10.30875 5.215 6.37
SABECO 17.39000 12.710 16.09
In [ ]:
# Filter data for Return on Equity (ROE)
roe_indicator = 'ROE' # Ensure this matches the exact string in your dataset
# Filter data for ROE
roe = beverage_data[beverage_data['Indicator'] == roe_indicator].copy()
print("Filtered Data:")
print(roe)
# Create a new column 'Company Group' to categorize SABECO and other companies
roe['Company Group'] = roe['Company'].apply(lambda x: 'SABECO' if x == 'SAB' else 'Beverage Manufacturing')
# Aggregate the data by 'Company Group' and 'Year'
aggregated_data = roe.groupby(['Company Group', 'Year'])['Value'].mean().reset_index()
print("Aggregated Data:")
print(aggregated_data)
# Pivot the DataFrame
roe_pivot = aggregated_data.pivot(index='Company Group', columns='Year', values='Value')
print("Pivoted Data:")
print(roe_pivot)
# Plotting the data
fig, ax = plt.subplots(figsize=(10, 6))
# Plot bars for each year
width = 0.2 # Adjusted bar width for better spacing
positions = list(range(len(roe_pivot.index))) # Bar positions
# Create a list of positions for each year
year_positions = [ [p + width*i for p in positions] for i in range(len(roe_pivot.columns)) ]
for i, (year, year_position) in enumerate(zip(roe_pivot.columns, year_positions)):
bars = ax.bar(year_position, roe_pivot[year], width=width, label=f'{year}')
# Add numeric values on top of the bars
for bar in bars:
height = bar.get_height()
ax.text(bar.get_x() + bar.get_width()/2., height,
f'{height:.2f}', ha='center', va='bottom')
# Set the labels and title
ax.set_xlabel('Company')
ax.set_ylabel('ROE')
ax.set_title('Return on Equity (ROE)')
ax.set_xticks([p + width for p in positions])
ax.set_xticklabels(roe_pivot.index, rotation=0)
ax.legend()
# Save the figure
plt.savefig('roe.png')
# Show the plot
plt.show()
Filtered Data:
Indicator Company Year Value Category Category 2
4237 ROE BHN 2020 12.02 Ratios Profitability ratios
4238 ROE BHN 2021 5.76 Ratios Profitability ratios
4239 ROE BHN 2022 9.13 Ratios Profitability ratios
4240 ROE SAB 2020 22.88 Ratios Profitability ratios
4241 ROE SAB 2021 16.79 Ratios Profitability ratios
4242 ROE SAB 2022 22.14 Ratios Profitability ratios
4243 ROE SCD 2020 1.76 Ratios Profitability ratios
4244 ROE SCD 2021 -20.36 Ratios Profitability ratios
4245 ROE SCD 2022 -36.92 Ratios Profitability ratios
4246 ROE SMB 2020 32.84 Ratios Profitability ratios
4247 ROE SMB 2021 30.25 Ratios Profitability ratios
4248 ROE SMB 2022 31.83 Ratios Profitability ratios
4249 ROE VCF 2020 49.22 Ratios Profitability ratios
4250 ROE VCF 2021 31.06 Ratios Profitability ratios
4251 ROE VCF 2022 21.82 Ratios Profitability ratios
4252 ROE HAD 2020 10.87 Ratios Profitability ratios
4253 ROE HAD 2021 4.23 Ratios Profitability ratios
4254 ROE HAD 2022 14.20 Ratios Profitability ratios
4255 ROE THB 2020 2.08 Ratios Profitability ratios
4256 ROE THB 2021 3.78 Ratios Profitability ratios
4257 ROE THB 2022 6.59 Ratios Profitability ratios
4258 ROE VDL 2020 5.78 Ratios Profitability ratios
4259 ROE VDL 2021 1.58 Ratios Profitability ratios
4260 ROE VDL 2022 -8.19 Ratios Profitability ratios
4261 ROE HAT 2020 13.67 Ratios Profitability ratios
4262 ROE HAT 2021 1.86 Ratios Profitability ratios
4263 ROE HAT 2022 31.24 Ratios Profitability ratios
Aggregated Data:
Company Group Year Value
0 Beverage Manufacturing 2020 16.0300
1 Beverage Manufacturing 2021 7.2700
2 Beverage Manufacturing 2022 8.7125
3 SABECO 2020 22.8800
4 SABECO 2021 16.7900
5 SABECO 2022 22.1400
Pivoted Data:
Year 2020 2021 2022
Company Group
Beverage Manufacturing 16.03 7.27 8.7125
SABECO 22.88 16.79 22.1400
In [ ]:
# Filter data for Gross Profit Margin
gross_profit_margin_indicator = 'Gross profit margin' # Ensure this matches the exact string in your dataset
# Filter data for Gross Profit Margin
gross_profit_margin = beverage_data[beverage_data['Indicator'] == gross_profit_margin_indicator].copy()
print("Filtered Data:")
print(gross_profit_margin)
# Create a new column 'Company Group' to categorize SABECO and other companies
gross_profit_margin['Company Group'] = gross_profit_margin['Company'].apply(lambda x: 'SABECO' if x == 'SAB' else 'Beverage Manufacturing')
# Aggregate the data by 'Company Group' and 'Year'
aggregated_data = gross_profit_margin.groupby(['Company Group', 'Year'])['Value'].mean().reset_index()
print("Aggregated Data:")
print(aggregated_data)
# Pivot the DataFrame
gross_profit_margin_pivot = aggregated_data.pivot(index='Company Group', columns='Year', values='Value')
print("Pivoted Data:")
print(gross_profit_margin_pivot)
# Plotting the data
fig, ax = plt.subplots(figsize=(10, 6))
# Plot bars for each year
width = 0.2 # Adjusted bar width for better spacing
positions = list(range(len(gross_profit_margin_pivot.index))) # Bar positions
# Create a list of positions for each year
year_positions = [ [p + width*i for p in positions] for i in range(len(gross_profit_margin_pivot.columns)) ]
for i, (year, year_position) in enumerate(zip(gross_profit_margin_pivot.columns, year_positions)):
bars = ax.bar(year_position, gross_profit_margin_pivot[year], width=width, label=f'{year}')
# Add numeric values on top of the bars
for bar in bars:
height = bar.get_height()
ax.text(bar.get_x() + bar.get_width()/2., height,
f'{height:.2f}', ha='center', va='bottom')
# Set the labels and title
ax.set_xlabel('Company')
ax.set_ylabel('Gross Profit Margin')
ax.set_title('Gross Profit Margin')
ax.set_xticks([p + width for p in positions])
ax.set_xticklabels(gross_profit_margin_pivot.index, rotation=0)
ax.legend()
# Save the figure
plt.savefig('gross_profit_margin.png')
# Show the plot
plt.show()
Filtered Data:
Indicator Company Year Value Category Category 2
3209 Gross profit margin BHN 2020 26.56 Ratios Profitability ratios
3210 Gross profit margin BHN 2021 24.40 Ratios Profitability ratios
3211 Gross profit margin BHN 2022 27.54 Ratios Profitability ratios
3212 Gross profit margin SAB 2020 30.40 Ratios Profitability ratios
3213 Gross profit margin SAB 2021 28.85 Ratios Profitability ratios
3214 Gross profit margin SAB 2022 30.79 Ratios Profitability ratios
3215 Gross profit margin SCD 2020 24.35 Ratios Profitability ratios
3216 Gross profit margin SCD 2021 15.06 Ratios Profitability ratios
3217 Gross profit margin SCD 2022 18.60 Ratios Profitability ratios
3218 Gross profit margin SMB 2020 25.78 Ratios Profitability ratios
3219 Gross profit margin SMB 2021 26.45 Ratios Profitability ratios
3220 Gross profit margin SMB 2022 26.90 Ratios Profitability ratios
3221 Gross profit margin VCF 2020 30.69 Ratios Profitability ratios
3222 Gross profit margin VCF 2021 25.06 Ratios Profitability ratios
3223 Gross profit margin VCF 2022 22.37 Ratios Profitability ratios
3224 Gross profit margin HAD 2020 26.04 Ratios Profitability ratios
3225 Gross profit margin HAD 2021 22.88 Ratios Profitability ratios
3226 Gross profit margin HAD 2022 27.91 Ratios Profitability ratios
3227 Gross profit margin THB 2020 13.67 Ratios Profitability ratios
3228 Gross profit margin THB 2021 13.17 Ratios Profitability ratios
3229 Gross profit margin THB 2022 9.74 Ratios Profitability ratios
3230 Gross profit margin VDL 2020 9.45 Ratios Profitability ratios
3231 Gross profit margin VDL 2021 7.63 Ratios Profitability ratios
3232 Gross profit margin VDL 2022 3.16 Ratios Profitability ratios
3233 Gross profit margin HAT 2020 5.95 Ratios Profitability ratios
3234 Gross profit margin HAT 2021 5.06 Ratios Profitability ratios
3235 Gross profit margin HAT 2022 7.41 Ratios Profitability ratios
Aggregated Data:
Company Group Year Value
0 Beverage Manufacturing 2020 20.31125
1 Beverage Manufacturing 2021 17.46375
2 Beverage Manufacturing 2022 17.95375
3 SABECO 2020 30.40000
4 SABECO 2021 28.85000
5 SABECO 2022 30.79000
Pivoted Data:
Year 2020 2021 2022
Company Group
Beverage Manufacturing 20.31125 17.46375 17.95375
SABECO 30.40000 28.85000 30.79000
In [ ]:
# Filter data for Net Profit Margin
net_profit_margin_indicator = 'Net profit margin' # Ensure this matches the exact string in your dataset
# Filter data for Net Profit Margin
net_profit_margin = beverage_data[beverage_data['Indicator'] == net_profit_margin_indicator].copy()
print("Filtered Data:")
print(net_profit_margin)
# Create a new column 'Company Group' to categorize SABECO and other companies
net_profit_margin['Company Group'] = net_profit_margin['Company'].apply(lambda x: 'SABECO' if x == 'SAB' else 'Beverage Manufacturing')
# Aggregate the data by 'Company Group' and 'Year'
aggregated_data = net_profit_margin.groupby(['Company Group', 'Year'])['Value'].mean().reset_index()
print("Aggregated Data:")
print(aggregated_data)
# Pivot the DataFrame
net_profit_margin_pivot = aggregated_data.pivot(index='Company Group', columns='Year', values='Value')
print("Pivoted Data:")
print(net_profit_margin_pivot)
# Plotting the data
fig, ax = plt.subplots(figsize=(10, 6))
# Plot bars for each year
width = 0.2 # Adjusted bar width for better spacing
positions = list(range(len(net_profit_margin_pivot.index))) # Bar positions
# Create a list of positions for each year
year_positions = [ [p + width*i for p in positions] for i in range(len(net_profit_margin_pivot.columns)) ]
for i, (year, year_position) in enumerate(zip(net_profit_margin_pivot.columns, year_positions)):
bars = ax.bar(year_position, net_profit_margin_pivot[year], width=width, label=f'{year}')
# Add numeric values on top of the bars
for bar in bars:
height = bar.get_height()
ax.text(bar.get_x() + bar.get_width()/2., height,
f'{height:.2f}', ha='center', va='bottom')
# Set the labels and title
ax.set_xlabel('Company')
ax.set_ylabel('Net Profit Margin')
ax.set_title('Net Profit Margin')
ax.set_xticks([p + width for p in positions])
ax.set_xticklabels(net_profit_margin_pivot.index, rotation=0)
ax.legend()
# Save the figure
plt.savefig('net_profit_margin.png')
# Show the plot
plt.show()
Filtered Data:
Indicator Company Year Value Category Category 2
3713 Net profit margin BHN 2020 8.86 Ratios Profitability ratios
3714 Net profit margin BHN 2021 4.66 Ratios Profitability ratios
3715 Net profit margin BHN 2022 5.99 Ratios Profitability ratios
3716 Net profit margin SAB 2020 17.66 Ratios Profitability ratios
3717 Net profit margin SAB 2021 14.90 Ratios Profitability ratios
3718 Net profit margin SAB 2022 15.72 Ratios Profitability ratios
3719 Net profit margin SCD 2020 2.11 Ratios Profitability ratios
3720 Net profit margin SCD 2021 -27.02 Ratios Profitability ratios
3721 Net profit margin SCD 2022 -28.80 Ratios Profitability ratios
3722 Net profit margin SMB 2020 13.16 Ratios Profitability ratios
3723 Net profit margin SMB 2021 13.32 Ratios Profitability ratios
3724 Net profit margin SMB 2022 13.32 Ratios Profitability ratios
3725 Net profit margin VCF 2020 24.85 Ratios Profitability ratios
3726 Net profit margin VCF 2021 19.33 Ratios Profitability ratios
3727 Net profit margin VCF 2022 14.46 Ratios Profitability ratios
3728 Net profit margin HAD 2020 5.50 Ratios Profitability ratios
3729 Net profit margin HAD 2021 2.49 Ratios Profitability ratios
3730 Net profit margin HAD 2022 6.95 Ratios Profitability ratios
3731 Net profit margin THB 2020 0.22 Ratios Profitability ratios
3732 Net profit margin THB 2021 0.43 Ratios Profitability ratios
3733 Net profit margin THB 2022 0.62 Ratios Profitability ratios
3734 Net profit margin VDL 2020 3.90 Ratios Profitability ratios
3735 Net profit margin VDL 2021 1.34 Ratios Profitability ratios
3736 Net profit margin VDL 2022 -8.70 Ratios Profitability ratios
3737 Net profit margin HAT 2020 1.17 Ratios Profitability ratios
3738 Net profit margin HAT 2021 0.24 Ratios Profitability ratios
3739 Net profit margin HAT 2022 1.70 Ratios Profitability ratios
Aggregated Data:
Company Group Year Value
0 Beverage Manufacturing 2020 7.47125
1 Beverage Manufacturing 2021 1.84875
2 Beverage Manufacturing 2022 0.69250
3 SABECO 2020 17.66000
4 SABECO 2021 14.90000
5 SABECO 2022 15.72000
Pivoted Data:
Year 2020 2021 2022
Company Group
Beverage Manufacturing 7.47125 1.84875 0.6925
SABECO 17.66000 14.90000 15.7200
In [ ]:
# Filter data for Price to Book Ratio (P/B)
pb_ratio_indicator = 'P/B' # Ensure this matches the exact string in your dataset
# Filter data for P/B Ratio
pb_ratio = beverage_data[beverage_data['Indicator'] == pb_ratio_indicator].copy()
print("Filtered Data:")
print(pb_ratio)
# Create a new column 'Company Group' to categorize SABECO and other companies
pb_ratio['Company Group'] = pb_ratio['Company'].apply(lambda x: 'SABECO' if x == 'SAB' else 'Beverage Manufacturing')
# Aggregate the data by 'Company Group' and 'Year'
aggregated_data = pb_ratio.groupby(['Company Group', 'Year'])['Value'].mean().reset_index()
print("Aggregated Data:")
print(aggregated_data)
# Pivot the DataFrame
pb_ratio_pivot = aggregated_data.pivot(index='Company Group', columns='Year', values='Value')
print("Pivoted Data:")
print(pb_ratio_pivot)
# Plotting the data
fig, ax = plt.subplots(figsize=(10, 6))
# Plot bars for each year
width = 0.2 # Adjusted bar width for better spacing
positions = list(range(len(pb_ratio_pivot.index))) # Bar positions
# Create a list of positions for each year
year_positions = [ [p + width*i for p in positions] for i in range(len(pb_ratio_pivot.columns)) ]
for i, (year, year_position) in enumerate(zip(pb_ratio_pivot.columns, year_positions)):
bars = ax.bar(year_position, pb_ratio_pivot[year], width=width, label=f'{year}')
# Add numeric values on top of the bars
for bar in bars:
height = bar.get_height()
ax.text(bar.get_x() + bar.get_width()/2., height,
f'{height:.2f}', ha='center', va='bottom')
# Set the labels and title
ax.set_xlabel('Company')
ax.set_ylabel('P/B Ratio')
ax.set_title('Price to Book Ratio (P/B)')
ax.set_xticks([p + width for p in positions])
ax.set_xticklabels(pb_ratio_pivot.index, rotation=0)
ax.legend()
# Save the figure
plt.savefig('pb_ratio.png')
# Show the plot
plt.show()
Filtered Data:
Indicator Company Year Value Category Category 2
3888 P/B BHN 2020 3.12 Ratios Valuation ratios
3889 P/B BHN 2021 2.90 Ratios Valuation ratios
3890 P/B BHN 2022 2.01 Ratios Valuation ratios
3891 P/B SAB 2020 5.89 Ratios Valuation ratios
3892 P/B SAB 2021 4.29 Ratios Valuation ratios
3893 P/B SAB 2022 4.35 Ratios Valuation ratios
3894 P/B SCD 2020 1.04 Ratios Valuation ratios
3895 P/B SCD 2021 1.15 Ratios Valuation ratios
3896 P/B SCD 2022 1.62 Ratios Valuation ratios
3897 P/B SMB 2020 2.53 Ratios Valuation ratios
3898 P/B SMB 2021 2.30 Ratios Valuation ratios
3899 P/B SMB 2022 2.19 Ratios Valuation ratios
3900 P/B VCF 2020 4.01 Ratios Valuation ratios
3901 P/B VCF 2021 4.93 Ratios Valuation ratios
3902 P/B VCF 2022 3.56 Ratios Valuation ratios
3903 P/B HAD 2020 1.18 Ratios Valuation ratios
3904 P/B HAD 2021 0.96 Ratios Valuation ratios
3905 P/B HAD 2022 0.78 Ratios Valuation ratios
3906 P/B THB 2020 0.73 Ratios Valuation ratios
3907 P/B THB 2021 1.17 Ratios Valuation ratios
3908 P/B THB 2022 0.94 Ratios Valuation ratios
3909 P/B VDL 2020 1.83 Ratios Valuation ratios
3910 P/B VDL 2021 1.63 Ratios Valuation ratios
3911 P/B VDL 2022 1.84 Ratios Valuation ratios
3912 P/B HAT 2020 1.31 Ratios Valuation ratios
3913 P/B HAT 2021 1.36 Ratios Valuation ratios
3914 P/B HAT 2022 0.99 Ratios Valuation ratios
Aggregated Data:
Company Group Year Value
0 Beverage Manufacturing 2020 1.96875
1 Beverage Manufacturing 2021 2.05000
2 Beverage Manufacturing 2022 1.74125
3 SABECO 2020 5.89000
4 SABECO 2021 4.29000
5 SABECO 2022 4.35000
Pivoted Data:
Year 2020 2021 2022
Company Group
Beverage Manufacturing 1.96875 2.05 1.74125
SABECO 5.89000 4.29 4.35000
In [ ]:
# Filter data for Price to Earning Ratio (P/E)
pe_ratio_indicator = 'P/E' # Ensure this matches the exact string in your dataset
# Filter data for P/E Ratio
pe_ratio = beverage_data[beverage_data['Indicator'] == pe_ratio_indicator].copy()
print("Filtered Data:")
print(pe_ratio)
# Create a new column 'Company Group' to categorize SABECO and other companies
pe_ratio['Company Group'] = pe_ratio['Company'].apply(lambda x: 'SABECO' if x == 'SAB' else 'Beverage Manufacturing')
# Aggregate the data by 'Company Group' and 'Year'
aggregated_data = pe_ratio.groupby(['Company Group', 'Year'])['Value'].mean().reset_index()
print("Aggregated Data:")
print(aggregated_data)
# Pivot the DataFrame
pe_ratio_pivot = aggregated_data.pivot(index='Company Group', columns='Year', values='Value')
print("Pivoted Data:")
print(pe_ratio_pivot)
# Plotting the data
fig, ax = plt.subplots(figsize=(10, 6))
# Plot bars for each year
width = 0.2 # Adjusted bar width for better spacing
positions = list(range(len(pe_ratio_pivot.index))) # Bar positions
# Create a list of positions for each year
year_positions = [ [p + width*i for p in positions] for i in range(len(pe_ratio_pivot.columns)) ]
for i, (year, year_position) in enumerate(zip(pe_ratio_pivot.columns, year_positions)):
bars = ax.bar(year_position, pe_ratio_pivot[year], width=width, label=f'{year}')
# Add numeric values on top of the bars
for bar in bars:
height = bar.get_height()
ax.text(bar.get_x() + bar.get_width()/2., height,
f'{height:.2f}', ha='center', va='bottom')
# Set the labels and title
ax.set_xlabel('Company')
ax.set_ylabel('P/E Ratio')
ax.set_title('Price to Earning Ratio (P/E)')
ax.set_xticks([p + width for p in positions])
ax.set_xticklabels(pe_ratio_pivot.index, rotation=0)
ax.legend()
# Save the figure
plt.savefig('pe_ratio.png')
# Show the plot
plt.show()
Filtered Data:
Indicator Company Year Value Category Category 2
3915 P/E BHN 2020 27.26 Ratios Valuation ratios
3916 P/E BHN 2021 46.06 Ratios Valuation ratios
3917 P/E BHN 2022 23.04 Ratios Valuation ratios
3918 P/E SAB 2020 26.47 Ratios Valuation ratios
3919 P/E SAB 2021 26.33 Ratios Valuation ratios
3920 P/E SAB 2022 20.49 Ratios Valuation ratios
3921 P/E SCD 2020 58.87 Ratios Valuation ratios
3922 P/E SCD 2021 -5.05 Ratios Valuation ratios
3923 P/E SCD 2022 -3.59 Ratios Valuation ratios
3924 P/E SMB 2020 7.87 Ratios Valuation ratios
3925 P/E SMB 2021 8.03 Ratios Valuation ratios
3926 P/E SMB 2022 7.18 Ratios Valuation ratios
3927 P/E VCF 2020 8.31 Ratios Valuation ratios
3928 P/E VCF 2021 14.53 Ratios Valuation ratios
3929 P/E VCF 2022 18.57 Ratios Valuation ratios
3930 P/E HAD 2020 11.02 Ratios Valuation ratios
3931 P/E HAD 2021 21.96 Ratios Valuation ratios
3932 P/E HAD 2022 5.62 Ratios Valuation ratios
3933 P/E THB 2020 34.89 Ratios Valuation ratios
3934 P/E THB 2021 31.19 Ratios Valuation ratios
3935 P/E THB 2022 14.41 Ratios Valuation ratios
3936 P/E VDL 2020 32.64 Ratios Valuation ratios
3937 P/E VDL 2021 103.57 Ratios Valuation ratios
3938 P/E VDL 2022 -21.60 Ratios Valuation ratios
3939 P/E HAT 2020 9.37 Ratios Valuation ratios
3940 P/E HAT 2021 65.96 Ratios Valuation ratios
3941 P/E HAT 2022 3.56 Ratios Valuation ratios
Aggregated Data:
Company Group Year Value
0 Beverage Manufacturing 2020 23.77875
1 Beverage Manufacturing 2021 35.78125
2 Beverage Manufacturing 2022 5.89875
3 SABECO 2020 26.47000
4 SABECO 2021 26.33000
5 SABECO 2022 20.49000
Pivoted Data:
Year 2020 2021 2022
Company Group
Beverage Manufacturing 23.77875 35.78125 5.89875
SABECO 26.47000 26.33000 20.49000
In [ ]:
#MA30
In [ ]:
# Load the data
file_path = 'MA30.xlsx' # Replace with the correct path
stock_data = pd.read_excel(file_path)
# Inspect the data
print(stock_data.head())
print(stock_data.columns)
Date VN-INDEX SAB MA30 (VN-INDEX) MA30 (SAB) 0 2023-01-03 1043.90 84.50 NaN NaN 1 2023-01-04 1046.35 85.75 NaN NaN 2 2023-01-05 1055.82 87.10 NaN NaN 3 2023-01-06 1051.44 90.35 NaN NaN 4 2023-01-09 1054.21 89.80 NaN NaN Index(['Date', 'VN-INDEX', 'SAB', 'MA30 (VN-INDEX)', 'MA30 (SAB)'], dtype='object')
In [ ]:
In [ ]:
# Calculate the 30-day moving average for SAB
stock_data['MA30_SAB'] = stock_data['SAB'].rolling(window=30).mean()
# Calculate the 30-day moving average for VN-INDEX
stock_data['MA30_VNINDEX'] = stock_data['VN-INDEX'].rolling(window=30).mean()
# Inspect the updated data
print(stock_data[['Date', 'SAB', 'MA30_SAB', 'VN-INDEX', 'MA30_VNINDEX']].head(35)) # Display more rows to see the moving average
Date SAB MA30_SAB VN-INDEX MA30_VNINDEX 0 2023-01-03 84.50 NaN 1043.90 NaN 1 2023-01-04 85.75 NaN 1046.35 NaN 2 2023-01-05 87.10 NaN 1055.82 NaN 3 2023-01-06 90.35 NaN 1051.44 NaN 4 2023-01-09 89.80 NaN 1054.21 NaN 5 2023-01-10 88.05 NaN 1053.35 NaN 6 2023-01-11 90.50 NaN 1055.76 NaN 7 2023-01-12 89.90 NaN 1056.39 NaN 8 2023-01-13 92.95 NaN 1060.17 NaN 9 2023-01-16 94.25 NaN 1066.68 NaN 10 2023-01-17 93.50 NaN 1088.29 NaN 11 2023-01-18 93.50 NaN 1098.28 NaN 12 2023-01-19 92.85 NaN 1108.08 NaN 13 2023-01-27 96.55 NaN 1117.10 NaN 14 2023-01-30 95.00 NaN 1102.57 NaN 15 2023-01-31 94.50 NaN 1111.18 NaN 16 2023-02-01 93.50 NaN 1075.97 NaN 17 2023-02-02 93.75 NaN 1077.59 NaN 18 2023-02-03 96.40 NaN 1077.15 NaN 19 2023-02-06 97.70 NaN 1089.29 NaN 20 2023-02-07 95.60 NaN 1065.84 NaN 21 2023-02-08 98.60 NaN 1072.22 NaN 22 2023-02-09 96.90 NaN 1064.03 NaN 23 2023-02-10 94.70 NaN 1055.30 NaN 24 2023-02-13 95.95 NaN 1043.70 NaN 25 2023-02-14 94.00 NaN 1038.64 NaN 26 2023-02-15 94.50 NaN 1048.20 NaN 27 2023-02-16 94.85 NaN 1058.29 NaN 28 2023-02-17 93.55 NaN 1059.31 NaN 29 2023-02-20 95.10 93.138333 1086.69 1069.393000 30 2023-02-21 95.25 93.496667 1082.23 1070.670667 31 2023-02-22 93.50 93.755000 1054.28 1070.935000 32 2023-02-23 93.55 93.970000 1053.66 1070.863000 33 2023-02-24 94.50 94.108333 1039.56 1070.467000 34 2023-02-27 92.65 94.203333 1021.25 1069.368333
In [ ]:
import matplotlib.pyplot as plt
# Plot for SAB
plt.figure(figsize=(12, 6))
plt.plot(stock_data['Date'], stock_data['SAB'], label='Actual Closing Prices (SAB)')
plt.plot(stock_data['Date'], stock_data['MA30_SAB'], label='30-day Moving Average (SAB)', color='orange')
plt.title('MA30 (SAB)')
plt.xlabel('Date')
plt.ylabel('Price')
plt.legend()
plt.xticks(rotation=45)
plt.savefig('MA30_SAB.png')
plt.show()
# Plot for VN-INDEX
plt.figure(figsize=(12, 6))
plt.plot(stock_data['Date'], stock_data['VN-INDEX'], label='Actual Closing Prices (VN-INDEX)')
plt.plot(stock_data['Date'], stock_data['MA30_VNINDEX'], label='30-day Moving Average (VN-INDEX)', color='orange')
plt.title('MA30 (VN-INDEX)')
plt.xlabel('Date')
plt.ylabel('VNINDEX')
plt.legend()
plt.xticks(rotation=45)
plt.savefig('MA30_VNINDEX.png')
plt.show()
In [ ]:
# Create and Summarize the LSTM Model
# Define the model
model = Sequential()
model.add(LSTM(64, return_sequences=True, input_shape=(1, 1)))
model.add(Dropout(0.2))
model.add(LSTM(64, return_sequences=True))
model.add(Dropout(0.2))
model.add(LSTM(64))
model.add(Dropout(0.2))
model.add(Dense(1))
# Display the model summary
model.summary()
# Visualize and save the model as an image
plot_model(model, to_file='LSTM_model.png', show_shapes=True, show_layer_names=True)
# Save the model summary to a file
with open('model_summary.txt', 'w') as f:
model.summary(print_fn=lambda x: f.write(x + '\n'))
/Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/rnn.py:204: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs)
Model: "sequential"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓ ┃ Layer (type) ┃ Output Shape ┃ Param # ┃ ┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩ │ lstm (LSTM) │ (None, 1, 64) │ 16,896 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dropout (Dropout) │ (None, 1, 64) │ 0 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ lstm_1 (LSTM) │ (None, 1, 64) │ 33,024 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dropout_1 (Dropout) │ (None, 1, 64) │ 0 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ lstm_2 (LSTM) │ (None, 64) │ 33,024 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dropout_2 (Dropout) │ (None, 64) │ 0 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dense (Dense) │ (None, 1) │ 65 │ └─────────────────────────────────┴────────────────────────┴───────────────┘
Total params: 83,009 (324.25 KB)
Trainable params: 83,009 (324.25 KB)
Non-trainable params: 0 (0.00 B)
--------------------------------------------------------------------------- FileNotFoundError Traceback (most recent call last) File ~/grad_prj/.conda/lib/python3.12/site-packages/pydot/core.py:1753, in Dot.create(self, prog, format, encoding) 1752 try: -> 1753 stdout_data, stderr_data, process = call_graphviz( 1754 program=prog, 1755 arguments=arguments, 1756 working_dir=tmp_dir, 1757 ) 1758 except OSError as e: File ~/grad_prj/.conda/lib/python3.12/site-packages/pydot/core.py:133, in call_graphviz(program, arguments, working_dir, **kwargs) 131 program_with_args = [program] + arguments --> 133 process = subprocess.Popen( 134 program_with_args, 135 env=env, 136 cwd=working_dir, 137 shell=False, 138 stderr=subprocess.PIPE, 139 stdout=subprocess.PIPE, 140 **kwargs, 141 ) 142 stdout_data, stderr_data = process.communicate() File ~/grad_prj/.conda/lib/python3.12/subprocess.py:1026, in Popen.__init__(self, args, bufsize, executable, stdin, stdout, stderr, preexec_fn, close_fds, shell, cwd, env, universal_newlines, startupinfo, creationflags, restore_signals, start_new_session, pass_fds, user, group, extra_groups, encoding, errors, text, umask, pipesize, process_group) 1023 self.stderr = io.TextIOWrapper(self.stderr, 1024 encoding=encoding, errors=errors) -> 1026 self._execute_child(args, executable, preexec_fn, close_fds, 1027 pass_fds, cwd, env, 1028 startupinfo, creationflags, shell, 1029 p2cread, p2cwrite, 1030 c2pread, c2pwrite, 1031 errread, errwrite, 1032 restore_signals, 1033 gid, gids, uid, umask, 1034 start_new_session, process_group) 1035 except: 1036 # Cleanup if the child failed starting. File ~/grad_prj/.conda/lib/python3.12/subprocess.py:1955, in Popen._execute_child(self, args, executable, preexec_fn, close_fds, pass_fds, cwd, env, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, restore_signals, gid, gids, uid, umask, start_new_session, process_group) 1954 if err_filename is not None: -> 1955 raise child_exception_type(errno_num, err_msg, err_filename) 1956 else: FileNotFoundError: [Errno 2] No such file or directory: 'dot' During handling of the above exception, another exception occurred: FileNotFoundError Traceback (most recent call last) File ~/grad_prj/.conda/lib/python3.12/site-packages/keras/src/utils/model_visualization.py:37, in check_graphviz() 34 try: 35 # Attempt to create an image of a blank graph 36 # to check the pydot/graphviz installation. ---> 37 pydot.Dot.create(pydot.Dot()) 38 return True File ~/grad_prj/.conda/lib/python3.12/site-packages/pydot/core.py:1762, in Dot.create(self, prog, format, encoding) 1761 args[1] = '"{prog}" not found in path.'.format(prog=prog) -> 1762 raise OSError(*args) 1763 else: FileNotFoundError: [Errno 2] "dot" not found in path. During handling of the above exception, another exception occurred: AttributeError Traceback (most recent call last) Cell In[14], line 16 13 model.summary() 15 # Visualize and save the model as an image ---> 16 plot_model(model, to_file='LSTM_model.png', show_shapes=True, show_layer_names=True) 18 # Save the model summary to a file 19 with open('model_summary.txt', 'w') as f: File ~/grad_prj/.conda/lib/python3.12/site-packages/keras/src/utils/model_visualization.py:416, in plot_model(model, to_file, show_shapes, show_dtype, show_layer_names, rankdir, expand_nested, dpi, show_layer_activations, show_trainable, **kwargs) 414 else: 415 raise ImportError(message) --> 416 if not check_graphviz(): 417 message = ( 418 "You must install graphviz " 419 "(see instructions at https://graphviz.gitlab.io/download/) " 420 "for `plot_model` to work." 421 ) 422 if "IPython.core.magics.namespace" in sys.modules: 423 # We don't raise an exception here in order to avoid crashing 424 # notebook tests where graphviz is not available. File ~/grad_prj/.conda/lib/python3.12/site-packages/keras/src/utils/model_visualization.py:39, in check_graphviz() 37 pydot.Dot.create(pydot.Dot()) 38 return True ---> 39 except (OSError, pydot.InvocationException): 40 return False AttributeError: module 'pydot' has no attribute 'InvocationException'
In [ ]:
import matplotlib.pyplot as plt
# Plot actual vs predicted stock prices for SAB
plt.figure(figsize=(12, 6))
plt.plot(stock_data['Date'], stock_data['SAB'], label='Actual Closing Prices (SAB)')
plt.plot(stock_data['Date'], stock_data['MA30_SAB'], label='30-day Moving Average (SAB)', color='orange')
plt.xlabel('Date')
plt.ylabel('Price')
plt.title('MA30 (SAB)')
plt.legend()
plt.savefig('MA30_SAB.png')
plt.show()
In [ ]:
# Load the datasets
sab_stock_2018_2022 = pd.read_excel('SAB stock price 2018-2022.xlsx')
sab_stock_2023 = pd.read_excel('SAB stock price 2023.xlsx')
vn_index_2018_2023 = pd.read_excel('VN-Index-2018-2023.xlsx')
# Concatenate the stock data for a continuous timeline
sab_stock = pd.concat([sab_stock_2018_2022, sab_stock_2023])
# Prepare the data for modeling
sab_stock['Date'] = pd.to_datetime(sab_stock['Date'])
vn_index_2018_2023['Date'] = pd.to_datetime(vn_index_2018_2023['Date'])
# Ensure data is sorted by date
sab_stock.sort_values('Date', inplace=True)
vn_index_2018_2023.sort_values('Date', inplace=True)
# Reset the index
sab_stock.reset_index(drop=True, inplace=True)
vn_index_2018_2023.reset_index(drop=True, inplace=True)
# Display the first few rows to check the data
print(sab_stock.head())
print(vn_index_2018_2023.head())
Date Close Open High Low Volume
0 2018-01-02 126.15 127.00 127.35 124.95 116190
1 2018-01-03 132.50 126.15 132.50 126.15 231500
2 2018-01-04 133.75 134.00 134.35 131.50 238800
3 2018-01-05 132.15 134.00 134.25 131.55 131870
4 2018-01-08 130.50 131.50 132.00 129.50 115690
Date Close Open High Low Volume
0 2018-01-02 995.77 986.05 996.18 984.24 172887390
1 2018-01-03 1005.67 999.86 1010.21 995.77 212432620
2 2018-01-04 1019.75 1009.37 1019.75 1005.67 235169670
3 2018-01-05 1012.65 1020.34 1020.60 1010.65 265519370
4 2018-01-08 1022.90 1011.36 1022.90 1004.89 234755510
In [ ]:
# Define a function to prepare data for LSTM
def prepare_data(stock, time_step=60):
data = stock['Close'].values
data = data.reshape(-1, 1)
scaler = MinMaxScaler(feature_range=(0, 1))
data = scaler.fit_transform(data)
X, y = [], []
for i in range(time_step, len(data)):
X.append(data[i-time_step:i, 0])
y.append(data[i, 0])
X, y = np.array(X), np.array(y)
X = np.reshape(X, (X.shape[0], X.shape[1], 1))
return X, y, scaler
# Split the data
train_size = int(len(sab_stock) * 0.8)
train_data, test_data = sab_stock[:train_size], sab_stock[train_size:]
X_train, y_train, scaler = prepare_data(train_data)
X_test, y_test, _ = prepare_data(test_data)
In [ ]:
# Create LSTM model
lstm_model = Sequential()
lstm_model.add(LSTM(50, return_sequences=True, input_shape=(X_train.shape[1], 1)))
lstm_model.add(Dropout(0.2))
lstm_model.add(LSTM(50, return_sequences=False))
lstm_model.add(Dropout(0.2))
lstm_model.add(Dense(1))
lstm_model.compile(optimizer='adam', loss='mean_squared_error')
# Train the model
lstm_model.fit(X_train, y_train, epochs=50, batch_size=32, validation_split=0.2)
# Predict and inverse transform the data
lstm_predictions = lstm_model.predict(X_test)
lstm_predictions = scaler.inverse_transform(lstm_predictions)
# Ensure the predicted and actual values have the same length
test_data = test_data[-len(lstm_predictions):]
# Calculate MSE
lstm_mse = np.mean(np.square(test_data['Close'].values - lstm_predictions.flatten()))
print(f"LSTM Model MSE: {lstm_mse}")
# Plot the results
plt.figure(figsize=(14, 7))
plt.plot(test_data['Date'], test_data['Close'], color='blue', label='Actual SAB Stock Price')
plt.plot(test_data['Date'], lstm_predictions, color='red', label='Predicted SAB Stock Price')
plt.xlabel('Date')
plt.ylabel('Price')
plt.title('SAB Stock Price Prediction using LSTM')
plt.legend()
plt.show()
Epoch 1/50
/Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/rnn.py:204: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs)
29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 27ms/step - loss: 0.1346 - val_loss: 0.0083 Epoch 2/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0092 - val_loss: 0.0020 Epoch 3/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0064 - val_loss: 0.0016 Epoch 4/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0063 - val_loss: 0.0016 Epoch 5/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0054 - val_loss: 0.0014 Epoch 6/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0052 - val_loss: 0.0014 Epoch 7/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0054 - val_loss: 0.0014 Epoch 8/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0048 - val_loss: 0.0013 Epoch 9/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0045 - val_loss: 0.0016 Epoch 10/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0052 - val_loss: 0.0013 Epoch 11/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0052 - val_loss: 0.0012 Epoch 12/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0048 - val_loss: 0.0013 Epoch 13/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0042 - val_loss: 0.0011 Epoch 14/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0056 - val_loss: 0.0011 Epoch 15/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0038 - val_loss: 0.0011 Epoch 16/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0042 - val_loss: 0.0014 Epoch 17/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0046 - val_loss: 0.0014 Epoch 18/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0045 - val_loss: 0.0010 Epoch 19/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0042 - val_loss: 0.0012 Epoch 20/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0048 - val_loss: 0.0012 Epoch 21/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0041 - val_loss: 0.0010 Epoch 22/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0037 - val_loss: 9.7077e-04 Epoch 23/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0039 - val_loss: 9.2443e-04 Epoch 24/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0040 - val_loss: 9.5718e-04 Epoch 25/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0038 - val_loss: 0.0010 Epoch 26/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0033 - val_loss: 9.1101e-04 Epoch 27/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0031 - val_loss: 8.6905e-04 Epoch 28/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0031 - val_loss: 8.8806e-04 Epoch 29/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0037 - val_loss: 0.0011 Epoch 30/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0034 - val_loss: 8.4052e-04 Epoch 31/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0030 - val_loss: 8.5448e-04 Epoch 32/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0039 - val_loss: 8.3314e-04 Epoch 33/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0038 - val_loss: 9.4404e-04 Epoch 34/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0030 - val_loss: 7.7676e-04 Epoch 35/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0028 - val_loss: 7.9905e-04 Epoch 36/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0032 - val_loss: 7.7291e-04 Epoch 37/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0034 - val_loss: 7.7526e-04 Epoch 38/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0031 - val_loss: 7.3472e-04 Epoch 39/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0030 - val_loss: 7.6242e-04 Epoch 40/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0028 - val_loss: 7.4094e-04 Epoch 41/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0030 - val_loss: 7.2760e-04 Epoch 42/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0031 - val_loss: 7.5767e-04 Epoch 43/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0029 - val_loss: 6.9907e-04 Epoch 44/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0028 - val_loss: 7.5263e-04 Epoch 45/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0032 - val_loss: 6.9136e-04 Epoch 46/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0029 - val_loss: 6.6767e-04 Epoch 47/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0023 - val_loss: 8.2146e-04 Epoch 48/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0027 - val_loss: 7.2271e-04 Epoch 49/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0028 - val_loss: 6.6953e-04 Epoch 50/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0028 - val_loss: 6.6487e-04 8/8 ━━━━━━━━━━━━━━━━━━━━ 0s 22ms/step LSTM Model MSE: 809.4264993020029
In [ ]:
# Create GRU model
gru_model = Sequential()
gru_model.add(GRU(50, return_sequences=True, input_shape=(X_train.shape[1], 1)))
gru_model.add(Dropout(0.2))
gru_model.add(GRU(50, return_sequences=False))
gru_model.add(Dropout(0.2))
gru_model.add(Dense(1))
gru_model.compile(optimizer='adam', loss='mean_squared_error')
# Train the model
gru_model.fit(X_train, y_train, epochs=50, batch_size=32, validation_split=0.2)
# Predict and inverse transform the data
gru_predictions = gru_model.predict(X_test)
gru_predictions = scaler.inverse_transform(gru_predictions)
# Ensure the predicted and actual values have the same length
test_data_trimmed = test_data[-len(gru_predictions):]
# Calculate MSE
gru_mse = np.mean(np.square(test_data_trimmed['Close'].values - gru_predictions.flatten()))
print(f"GRU Model MSE: {gru_mse}")
# Plot the results
plt.figure(figsize=(14, 7))
plt.plot(test_data_trimmed['Date'], test_data_trimmed['Close'], color='blue', label='Actual SAB Stock Price')
plt.plot(test_data_trimmed['Date'], gru_predictions, color='red', label='Predicted SAB Stock Price')
plt.xlabel('Date')
plt.ylabel('Price')
plt.title('SAB Stock Price Prediction using GRU')
plt.legend()
plt.show()
Epoch 1/50
/Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/rnn.py:204: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs)
29/29 ━━━━━━━━━━━━━━━━━━━━ 3s 35ms/step - loss: 0.1126 - val_loss: 0.0015 Epoch 2/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0106 - val_loss: 0.0015 Epoch 3/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0050 - val_loss: 6.9716e-04 Epoch 4/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0045 - val_loss: 6.2371e-04 Epoch 5/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0040 - val_loss: 8.7072e-04 Epoch 6/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0043 - val_loss: 5.8464e-04 Epoch 7/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0045 - val_loss: 5.8578e-04 Epoch 8/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0040 - val_loss: 5.6520e-04 Epoch 9/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0037 - val_loss: 5.4297e-04 Epoch 10/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0035 - val_loss: 5.6580e-04 Epoch 11/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0036 - val_loss: 5.6791e-04 Epoch 12/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0040 - val_loss: 5.4135e-04 Epoch 13/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0034 - val_loss: 6.3923e-04 Epoch 14/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0034 - val_loss: 5.1531e-04 Epoch 15/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0034 - val_loss: 5.4509e-04 Epoch 16/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0031 - val_loss: 4.9947e-04 Epoch 17/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0030 - val_loss: 4.9106e-04 Epoch 18/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0028 - val_loss: 7.8498e-04 Epoch 19/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0029 - val_loss: 4.7730e-04 Epoch 20/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0026 - val_loss: 6.5556e-04 Epoch 21/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0029 - val_loss: 4.8568e-04 Epoch 22/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0027 - val_loss: 5.4132e-04 Epoch 23/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0028 - val_loss: 4.8506e-04 Epoch 24/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0026 - val_loss: 4.8447e-04 Epoch 25/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0029 - val_loss: 4.7869e-04 Epoch 26/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0026 - val_loss: 6.7868e-04 Epoch 27/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0028 - val_loss: 4.4178e-04 Epoch 28/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0025 - val_loss: 5.5192e-04 Epoch 29/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0025 - val_loss: 5.0921e-04 Epoch 30/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0025 - val_loss: 5.1470e-04 Epoch 31/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0028 - val_loss: 4.4677e-04 Epoch 32/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0025 - val_loss: 4.7692e-04 Epoch 33/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0028 - val_loss: 4.0363e-04 Epoch 34/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0023 - val_loss: 4.1398e-04 Epoch 35/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0026 - val_loss: 4.3294e-04 Epoch 36/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0025 - val_loss: 4.2203e-04 Epoch 37/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0024 - val_loss: 4.7568e-04 Epoch 38/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0022 - val_loss: 4.0949e-04 Epoch 39/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0022 - val_loss: 6.6197e-04 Epoch 40/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0020 - val_loss: 3.7759e-04 Epoch 41/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0022 - val_loss: 5.6219e-04 Epoch 42/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0025 - val_loss: 4.7750e-04 Epoch 43/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0025 - val_loss: 3.8924e-04 Epoch 44/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0021 - val_loss: 3.5942e-04 Epoch 45/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0022 - val_loss: 3.9668e-04 Epoch 46/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0021 - val_loss: 3.5216e-04 Epoch 47/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0020 - val_loss: 3.5729e-04 Epoch 48/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0021 - val_loss: 7.9366e-04 Epoch 49/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0024 - val_loss: 3.6709e-04 Epoch 50/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0022 - val_loss: 4.3674e-04 WARNING:tensorflow:5 out of the last 17 calls to <function TensorFlowTrainer.make_predict_function.<locals>.one_step_on_data_distributed at 0x369dfaf20> triggered tf.function retracing. Tracing is expensive and the excessive number of tracings could be due to (1) creating @tf.function repeatedly in a loop, (2) passing tensors with different shapes, (3) passing Python objects instead of tensors. For (1), please define your @tf.function outside of the loop. For (2), @tf.function has reduce_retracing=True option that can avoid unnecessary retracing. For (3), please refer to https://www.tensorflow.org/guide/function#controlling_retracing and https://www.tensorflow.org/api_docs/python/tf/function for more details. 8/8 ━━━━━━━━━━━━━━━━━━━━ 0s 26ms/step GRU Model MSE: 761.418598697986
In [ ]:
# Create Bidirectional LSTM model
bi_lstm_model = Sequential()
bi_lstm_model.add(Bidirectional(LSTM(50, return_sequences=True), input_shape=(X_train.shape[1], 1)))
bi_lstm_model.add(Dropout(0.2))
bi_lstm_model.add(Bidirectional(LSTM(50, return_sequences=False)))
bi_lstm_model.add(Dropout(0.2))
bi_lstm_model.add(Dense(1))
bi_lstm_model.compile(optimizer='adam', loss='mean_squared_error')
# Train the model
bi_lstm_model.fit(X_train, y_train, epochs=50, batch_size=32, validation_split=0.2)
# Predict and inverse transform the data
bi_lstm_predictions = bi_lstm_model.predict(X_test)
bi_lstm_predictions = scaler.inverse_transform(bi_lstm_predictions)
# Ensure the predicted and actual values have the same length
test_data_trimmed = test_data[-len(bi_lstm_predictions):]
# Calculate MSE
bi_lstm_mse = np.mean(np.square(test_data_trimmed['Close'].values - bi_lstm_predictions.flatten()))
print(f"Bidirectional LSTM Model MSE: {bi_lstm_mse}")
# Plot the results
plt.figure(figsize=(14, 7))
plt.plot(test_data_trimmed['Date'], test_data_trimmed['Close'], color='blue', label='Actual SAB Stock Price')
plt.plot(test_data_trimmed['Date'], bi_lstm_predictions, color='red', label='Predicted SAB Stock Price')
plt.xlabel('Date')
plt.ylabel('Price')
plt.title('SAB Stock Price Prediction using Bidirectional LSTM')
plt.legend()
plt.show()
Epoch 1/50
/Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/bidirectional.py:107: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs)
29/29 ━━━━━━━━━━━━━━━━━━━━ 3s 46ms/step - loss: 0.1062 - val_loss: 0.0058 Epoch 2/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0119 - val_loss: 0.0020 Epoch 3/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0061 - val_loss: 0.0021 Epoch 4/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 50ms/step - loss: 0.0060 - val_loss: 0.0017 Epoch 5/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 59ms/step - loss: 0.0042 - val_loss: 0.0013 Epoch 6/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0044 - val_loss: 0.0012 Epoch 7/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0042 - val_loss: 9.7152e-04 Epoch 8/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 51ms/step - loss: 0.0039 - val_loss: 9.3508e-04 Epoch 9/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 53ms/step - loss: 0.0040 - val_loss: 0.0011 Epoch 10/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 51ms/step - loss: 0.0032 - val_loss: 8.4712e-04 Epoch 11/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 49ms/step - loss: 0.0035 - val_loss: 8.1754e-04 Epoch 12/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 49ms/step - loss: 0.0037 - val_loss: 8.1912e-04 Epoch 13/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 49ms/step - loss: 0.0033 - val_loss: 8.1372e-04 Epoch 14/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0026 - val_loss: 7.4891e-04 Epoch 15/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0032 - val_loss: 7.3412e-04 Epoch 16/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0028 - val_loss: 8.5471e-04 Epoch 17/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0027 - val_loss: 6.6650e-04 Epoch 18/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 51ms/step - loss: 0.0027 - val_loss: 6.4584e-04 Epoch 19/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 51ms/step - loss: 0.0028 - val_loss: 6.4599e-04 Epoch 20/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 50ms/step - loss: 0.0027 - val_loss: 6.1824e-04 Epoch 21/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 53ms/step - loss: 0.0029 - val_loss: 6.2178e-04 Epoch 22/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 51ms/step - loss: 0.0026 - val_loss: 5.8519e-04 Epoch 23/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 49ms/step - loss: 0.0026 - val_loss: 6.1426e-04 Epoch 24/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 49ms/step - loss: 0.0024 - val_loss: 7.0555e-04 Epoch 25/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 51ms/step - loss: 0.0025 - val_loss: 5.5118e-04 Epoch 26/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 53ms/step - loss: 0.0020 - val_loss: 8.1835e-04 Epoch 27/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 51ms/step - loss: 0.0023 - val_loss: 5.6085e-04 Epoch 28/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 51ms/step - loss: 0.0024 - val_loss: 5.3420e-04 Epoch 29/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 51ms/step - loss: 0.0022 - val_loss: 7.7018e-04 Epoch 30/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 51ms/step - loss: 0.0025 - val_loss: 0.0010 Epoch 31/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 51ms/step - loss: 0.0026 - val_loss: 5.1953e-04 Epoch 32/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 59ms/step - loss: 0.0022 - val_loss: 5.6499e-04 Epoch 33/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 53ms/step - loss: 0.0026 - val_loss: 5.3659e-04 Epoch 34/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 53ms/step - loss: 0.0021 - val_loss: 4.8516e-04 Epoch 35/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 53ms/step - loss: 0.0019 - val_loss: 5.8618e-04 Epoch 36/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0021 - val_loss: 5.8045e-04 Epoch 37/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 54ms/step - loss: 0.0019 - val_loss: 5.4962e-04 Epoch 38/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 55ms/step - loss: 0.0018 - val_loss: 4.7471e-04 Epoch 39/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 51ms/step - loss: 0.0022 - val_loss: 4.4494e-04 Epoch 40/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 49ms/step - loss: 0.0020 - val_loss: 4.5356e-04 Epoch 41/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0022 - val_loss: 5.3777e-04 Epoch 42/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 51ms/step - loss: 0.0019 - val_loss: 4.4128e-04 Epoch 43/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0019 - val_loss: 4.4495e-04 Epoch 44/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0019 - val_loss: 4.3301e-04 Epoch 45/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 54ms/step - loss: 0.0021 - val_loss: 4.4280e-04 Epoch 46/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0016 - val_loss: 4.3736e-04 Epoch 47/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 50ms/step - loss: 0.0019 - val_loss: 5.0965e-04 Epoch 48/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 49ms/step - loss: 0.0019 - val_loss: 4.3208e-04 Epoch 49/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 49ms/step - loss: 0.0019 - val_loss: 5.4520e-04 Epoch 50/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 51ms/step - loss: 0.0018 - val_loss: 4.4272e-04 8/8 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step Bidirectional LSTM Model MSE: 661.7432880411119
In [ ]:
# Create Bidirectional GRU model
bi_gru_model = Sequential()
bi_gru_model.add(Bidirectional(GRU(50, return_sequences=True), input_shape=(X_train.shape[1], 1)))
bi_gru_model.add(Dropout(0.2))
bi_gru_model.add(Bidirectional(GRU(50, return_sequences=False)))
bi_gru_model.add(Dropout(0.2))
bi_gru_model.add(Dense(1))
bi_gru_model.compile(optimizer='adam', loss='mean_squared_error')
# Train the model
bi_gru_model.fit(X_train, y_train, epochs=50, batch_size=32, validation_split=0.2)
# Predict and inverse transform the data
bi_gru_predictions = bi_gru_model.predict(X_test)
bi_gru_predictions = scaler.inverse_transform(bi_gru_predictions)
# Ensure the predicted and actual values have the same length
test_data_trimmed = test_data[-len(bi_gru_predictions):]
# Calculate MSE
bi_gru_mse = np.mean(np.square(test_data_trimmed['Close'].values - bi_gru_predictions.flatten()))
print(f"Bidirectional GRU Model MSE: {bi_gru_mse}")
# Plot the results
plt.figure(figsize=(14, 7))
plt.plot(test_data_trimmed['Date'], test_data_trimmed['Close'], color='blue', label='Actual SAB Stock Price')
plt.plot(test_data_trimmed['Date'], bi_gru_predictions, color='red', label='Predicted SAB Stock Price')
plt.xlabel('Date')
plt.ylabel('Price')
plt.title('SAB Stock Price Prediction using Bidirectional GRU')
plt.legend()
plt.show()
Epoch 1/50
/Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/bidirectional.py:107: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs)
29/29 ━━━━━━━━━━━━━━━━━━━━ 4s 47ms/step - loss: 0.1118 - val_loss: 0.0027 Epoch 2/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0118 - val_loss: 0.0013 Epoch 3/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0061 - val_loss: 0.0010 Epoch 4/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0055 - val_loss: 0.0014 Epoch 5/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 53ms/step - loss: 0.0041 - val_loss: 9.5005e-04 Epoch 6/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 53ms/step - loss: 0.0041 - val_loss: 8.0046e-04 Epoch 7/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 65ms/step - loss: 0.0030 - val_loss: 5.3287e-04 Epoch 8/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 57ms/step - loss: 0.0030 - val_loss: 5.2927e-04 Epoch 9/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 58ms/step - loss: 0.0031 - val_loss: 4.7480e-04 Epoch 10/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 55ms/step - loss: 0.0028 - val_loss: 4.9553e-04 Epoch 11/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 56ms/step - loss: 0.0031 - val_loss: 6.9748e-04 Epoch 12/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 66ms/step - loss: 0.0026 - val_loss: 5.2497e-04 Epoch 13/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 59ms/step - loss: 0.0026 - val_loss: 4.5178e-04 Epoch 14/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 58ms/step - loss: 0.0023 - val_loss: 4.4230e-04 Epoch 15/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 57ms/step - loss: 0.0025 - val_loss: 4.5767e-04 Epoch 16/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 54ms/step - loss: 0.0026 - val_loss: 4.5569e-04 Epoch 17/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 58ms/step - loss: 0.0024 - val_loss: 4.9064e-04 Epoch 18/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 54ms/step - loss: 0.0024 - val_loss: 4.3582e-04 Epoch 19/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 56ms/step - loss: 0.0026 - val_loss: 4.5874e-04 Epoch 20/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 55ms/step - loss: 0.0022 - val_loss: 4.2044e-04 Epoch 21/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 57ms/step - loss: 0.0024 - val_loss: 5.2486e-04 Epoch 22/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 56ms/step - loss: 0.0027 - val_loss: 4.1066e-04 Epoch 23/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 57ms/step - loss: 0.0021 - val_loss: 4.4137e-04 Epoch 24/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 66ms/step - loss: 0.0020 - val_loss: 4.5830e-04 Epoch 25/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 55ms/step - loss: 0.0023 - val_loss: 4.3410e-04 Epoch 26/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 73ms/step - loss: 0.0020 - val_loss: 6.9948e-04 Epoch 27/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 57ms/step - loss: 0.0021 - val_loss: 4.0204e-04 Epoch 28/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 63ms/step - loss: 0.0021 - val_loss: 6.0458e-04 Epoch 29/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 56ms/step - loss: 0.0021 - val_loss: 5.4583e-04 Epoch 30/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 56ms/step - loss: 0.0022 - val_loss: 4.1584e-04 Epoch 31/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 58ms/step - loss: 0.0022 - val_loss: 4.7987e-04 Epoch 32/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 62ms/step - loss: 0.0019 - val_loss: 4.0409e-04 Epoch 33/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 60ms/step - loss: 0.0019 - val_loss: 4.8145e-04 Epoch 34/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 59ms/step - loss: 0.0020 - val_loss: 3.7976e-04 Epoch 35/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 60ms/step - loss: 0.0017 - val_loss: 4.0132e-04 Epoch 36/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 55ms/step - loss: 0.0018 - val_loss: 4.6261e-04 Epoch 37/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 58ms/step - loss: 0.0021 - val_loss: 4.1398e-04 Epoch 38/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 60ms/step - loss: 0.0017 - val_loss: 3.6980e-04 Epoch 39/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 63ms/step - loss: 0.0018 - val_loss: 4.3861e-04 Epoch 40/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 59ms/step - loss: 0.0017 - val_loss: 7.8826e-04 Epoch 41/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 59ms/step - loss: 0.0020 - val_loss: 7.8023e-04 Epoch 42/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 59ms/step - loss: 0.0022 - val_loss: 3.9631e-04 Epoch 43/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 59ms/step - loss: 0.0018 - val_loss: 6.5484e-04 Epoch 44/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 61ms/step - loss: 0.0019 - val_loss: 3.9497e-04 Epoch 45/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 59ms/step - loss: 0.0019 - val_loss: 3.6548e-04 Epoch 46/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 58ms/step - loss: 0.0020 - val_loss: 5.1110e-04 Epoch 47/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 59ms/step - loss: 0.0016 - val_loss: 3.7144e-04 Epoch 48/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 57ms/step - loss: 0.0015 - val_loss: 4.0864e-04 Epoch 49/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 59ms/step - loss: 0.0020 - val_loss: 3.5263e-04 Epoch 50/50 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 55ms/step - loss: 0.0018 - val_loss: 7.6920e-04 8/8 ━━━━━━━━━━━━━━━━━━━━ 1s 51ms/step Bidirectional GRU Model MSE: 650.3792891691302
In [ ]:
# Decision Tree model
dt_model = DecisionTreeRegressor()
dt_model.fit(X_train.reshape(X_train.shape[0], -1), y_train)
dt_predictions = dt_model.predict(X_test.reshape(X_test.shape[0], -1))
# Ensure the predicted and actual values have the same length
test_data_trimmed = test_data[-len(dt_predictions):]
# Scale the predictions to match the actual values for visualization
dt_predictions_scaled = scaler.inverse_transform(dt_predictions.reshape(-1, 1)).flatten()
# Calculate MSE
dt_mse = np.mean(np.square(test_data_trimmed['Close'].values - dt_predictions_scaled))
print(f"Decision Tree Model MSE: {dt_mse}")
# Plot the results
plt.figure(figsize=(14, 7))
plt.plot(test_data_trimmed['Date'], test_data_trimmed['Close'], color='blue', label='Actual SAB Stock Price')
plt.plot(test_data_trimmed['Date'], dt_predictions_scaled, color='red', label='Predicted SAB Stock Price')
plt.xlabel('Date')
plt.ylabel('Price')
plt.title('SAB Stock Price Prediction using Decision Tree')
plt.legend()
plt.show()
Decision Tree Model MSE: 779.4260208333334
In [ ]:
# Random Forest model
rf_model = RandomForestRegressor(n_estimators=100)
rf_model.fit(X_train.reshape(X_train.shape[0], -1), y_train)
rf_predictions = rf_model.predict(X_test.reshape(X_test.shape[0], -1))
# Ensure the predicted and actual values have the same length
test_data_trimmed = test_data[-len(rf_predictions):]
# Scale the predictions to match the actual values for visualization
rf_predictions_scaled = scaler.inverse_transform(rf_predictions.reshape(-1, 1)).flatten()
# Check the range of the actual and predicted values
print(f"Actual values range: {test_data_trimmed['Close'].min()} - {test_data_trimmed['Close'].max()}")
print(f"Predicted values range: {rf_predictions_scaled.min()} - {rf_predictions_scaled.max()}")
# Calculate MSE
rf_mse = np.mean(np.square(test_data_trimmed['Close'].values - rf_predictions_scaled))
print(f"Random Forest Model MSE: {rf_mse}")
# Normalize the predictions to match the actual values' range for better visualization
min_actual = test_data_trimmed['Close'].min()
max_actual = test_data_trimmed['Close'].max()
rf_predictions_normalized = (rf_predictions_scaled - rf_predictions_scaled.min()) / (rf_predictions_scaled.max() - rf_predictions_scaled.min()) * (max_actual - min_actual) + min_actual
# Plot the results
plt.figure(figsize=(14, 7))
plt.plot(test_data_trimmed['Date'], test_data_trimmed['Close'], color='blue', label='Actual SAB Stock Price')
plt.plot(test_data_trimmed['Date'], rf_predictions_scaled, color='red', label='Predicted SAB Stock Price')
plt.xlabel('Date')
plt.ylabel('Price')
plt.title('SAB Stock Price Prediction using Random Forest')
plt.legend()
plt.show()
Actual values range: 56.7 - 98.6 Predicted values range: 64.2035 - 140.68250000000003 Random Forest Model MSE: 761.4485308864588
In [ ]:
# XGBoost model
xgb_model = XGBRegressor(n_estimators=100)
xgb_model.fit(X_train.reshape(X_train.shape[0], -1), y_train)
xgb_predictions = xgb_model.predict(X_test.reshape(X_test.shape[0], -1))
# Ensure the predicted and actual values have the same length
test_data_trimmed = test_data[-len(xgb_predictions):]
# Scale the predictions to match the actual values for visualization
xgb_predictions_scaled = scaler.inverse_transform(xgb_predictions.reshape(-1, 1)).flatten()
# Calculate MSE
xgb_mse = np.mean(np.square(test_data_trimmed['Close'].values - xgb_predictions_scaled))
print(f"XGBoost Model MSE: {xgb_mse}")
# Plot the results
plt.figure(figsize=(14, 7))
plt.plot(test_data_trimmed['Date'], test_data_trimmed['Close'], color='blue', label='Actual SAB Stock Price')
plt.plot(test_data_trimmed['Date'], xgb_predictions_scaled, color='red', label='Predicted SAB Stock Price')
plt.xlabel('Date')
plt.ylabel('Price')
plt.title('SAB Stock Price Prediction using XGBoost')
plt.legend()
plt.show()
XGBoost Model MSE: 721.9259540302777
In [ ]:
# Function to evaluate model performance
def evaluate_model(test_data, predictions, model_name):
mse = mean_squared_error(test_data['Close'], predictions)
mae = mean_absolute_error(test_data['Close'], predictions)
r2 = r2_score(test_data['Close'], predictions)
print(f"{model_name} Performance:")
print(f"Mean Squared Error (MSE): {mse}")
print(f"Mean Absolute Error (MAE): {mae}")
print(f"R-squared: {r2}\n")
return mse, mae, r2
# Evaluate each model
lstm_mse, lstm_mae, lstm_r2 = evaluate_model(test_data_trimmed, lstm_predictions, 'LSTM')
gru_mse, gru_mae, gru_r2 = evaluate_model(test_data_trimmed, gru_predictions, 'GRU')
bi_lstm_mse, bi_lstm_mae, bi_lstm_r2 = evaluate_model(test_data_trimmed, bi_lstm_predictions, 'Bidirectional LSTM')
bi_gru_mse, bi_gru_mae, bi_gru_r2 = evaluate_model(test_data_trimmed, bi_gru_predictions, 'Bidirectional GRU')
dt_mse, dt_mae, dt_r2 = evaluate_model(test_data_trimmed, dt_predictions_scaled, 'Decision Tree')
rf_mse, rf_mae, rf_r2 = evaluate_model(test_data_trimmed, rf_predictions_scaled, 'Random Forest')
xgb_mse, xgb_mae, xgb_r2 = evaluate_model(test_data_trimmed, xgb_predictions_scaled, 'XGBoost')
# Create a DataFrame to compare the models
comparison_df = pd.DataFrame({
'Model': ['LSTM', 'GRU', 'Bidirectional LSTM', 'Bidirectional GRU', 'Decision Tree', 'Random Forest', 'XGBoost'],
'MSE': [lstm_mse, gru_mse, bi_lstm_mse, bi_gru_mse, dt_mse, rf_mse, xgb_mse],
'MAE': [lstm_mae, gru_mae, bi_lstm_mae, bi_gru_mae, dt_mae, rf_mae, xgb_mae],
'R-squared': [lstm_r2, gru_r2, bi_lstm_r2, bi_gru_r2, dt_r2, rf_r2, xgb_r2]
})
print(comparison_df)
LSTM Performance:
Mean Squared Error (MSE): 809.4264993020029
Mean Absolute Error (MAE): 26.143808676401772
R-squared: -6.169536619506011
GRU Performance:
Mean Squared Error (MSE): 761.418598697986
Mean Absolute Error (MAE): 25.437595529556276
R-squared: -5.744304184315274
Bidirectional LSTM Performance:
Mean Squared Error (MSE): 661.7432880411119
Mean Absolute Error (MAE): 23.71650201479594
R-squared: -4.861425021807815
Bidirectional GRU Performance:
Mean Squared Error (MSE): 650.3792891691302
Mean Absolute Error (MAE): 23.127321360905967
R-squared: -4.760767820533881
Decision Tree Performance:
Mean Squared Error (MSE): 779.4260208333334
Mean Absolute Error (MAE): 25.46125
R-squared: -5.903805846953708
Random Forest Performance:
Mean Squared Error (MSE): 761.4485308864588
Mean Absolute Error (MAE): 25.175056250000008
R-squared: -5.744569310205695
XGBoost Performance:
Mean Squared Error (MSE): 721.9259540302777
Mean Absolute Error (MAE): 24.58965017954508
R-squared: -5.394496064133345
Model MSE MAE R-squared
0 LSTM 809.426499 26.143809 -6.169537
1 GRU 761.418599 25.437596 -5.744304
2 Bidirectional LSTM 661.743288 23.716502 -4.861425
3 Bidirectional GRU 650.379289 23.127321 -4.760768
4 Decision Tree 779.426021 25.461250 -5.903806
5 Random Forest 761.448531 25.175056 -5.744569
6 XGBoost 721.925954 24.589650 -5.394496
In [ ]:
Model MSE MAE R-squared 0 LSTM 809.426499 26.143809 -6.169537 1 GRU 761.418599 25.437596 -5.744304 2 Bidirectional LSTM 661.743288 23.716502 -4.861425 3 Bidirectional GRU 650.379289 23.127321 -4.760768 4 Decision Tree 779.426021 25.461250 -5.903806 5 Random Forest 761.448531 25.175056 -5.744569 6 XGBoost 721.925954 24.589650 -5.394496
In [ ]:
# Concatenate the stock data for a continuous timeline
sab_stock = pd.concat([sab_stock_2018_2022, sab_stock_2023])
sab_stock['Date'] = pd.to_datetime(sab_stock['Date'])
sab_stock.sort_values('Date', inplace=True)
sab_stock.reset_index(drop=True, inplace=True)
# Prepare the data for modeling
def prepare_data(stock, time_step=60):
data = stock['Close'].values
data = data.reshape(-1, 1)
scaler = MinMaxScaler(feature_range=(0, 1))
data = scaler.fit_transform(data)
X, y = [], []
for i in range(time_step, len(data)):
X.append(data[i-time_step:i, 0])
y.append(data[i, 0])
X, y = np.array(X), np.array(y)
X = np.reshape(X, (X.shape[0], X.shape[1], 1))
return X, y, scaler
# Split the data
train_size = int(len(sab_stock) * 0.8)
train_data, test_data = sab_stock[:train_size], sab_stock[train_size:]
X_train, y_train, scaler = prepare_data(train_data)
X_test, y_test, _ = prepare_data(test_data)
# Early stopping callback to prevent overfitting
early_stopping = EarlyStopping(monitor='val_loss', patience=10, restore_best_weights=True)
# Train the LSTM model
lstm_model = Sequential()
lstm_model.add(LSTM(50, return_sequences=True, input_shape=(X_train.shape[1], 1)))
lstm_model.add(Dropout(0.2))
lstm_model.add(LSTM(50, return_sequences=False))
lstm_model.add(Dropout(0.2))
lstm_model.add(Dense(1))
lstm_model.compile(optimizer='adam', loss='mean_squared_error')
lstm_model.fit(X_train, y_train, epochs=100, batch_size=32, validation_split=0.2, callbacks=[early_stopping])
# Train the GRU model
gru_model = Sequential()
gru_model.add(GRU(50, return_sequences=True, input_shape=(X_train.shape[1], 1)))
gru_model.add(Dropout(0.2))
gru_model.add(GRU(50, return_sequences=False))
gru_model.add(Dropout(0.2))
gru_model.add(Dense(1))
gru_model.compile(optimizer='adam', loss='mean_squared_error')
gru_model.fit(X_train, y_train, epochs=100, batch_size=32, validation_split=0.2, callbacks=[early_stopping])
# Train the Bidirectional LSTM model
bi_lstm_model = Sequential()
bi_lstm_model.add(Bidirectional(LSTM(50, return_sequences=True), input_shape=(X_train.shape[1], 1)))
bi_lstm_model.add(Dropout(0.2))
bi_lstm_model.add(Bidirectional(LSTM(50, return_sequences=False)))
bi_lstm_model.add(Dropout(0.2))
bi_lstm_model.add(Dense(1))
bi_lstm_model.compile(optimizer='adam', loss='mean_squared_error')
bi_lstm_model.fit(X_train, y_train, epochs=100, batch_size=32, validation_split=0.2, callbacks=[early_stopping])
# Train the Bidirectional GRU model
bi_gru_model = Sequential()
bi_gru_model.add(Bidirectional(GRU(50, return_sequences=True), input_shape=(X_train.shape[1], 1)))
bi_gru_model.add(Dropout(0.2))
bi_gru_model.add(Bidirectional(GRU(50, return_sequences=False)))
bi_gru_model.add(Dropout(0.2))
bi_gru_model.add(Dense(1))
bi_gru_model.compile(optimizer='adam', loss='mean_squared_error')
bi_gru_model.fit(X_train, y_train, epochs=100, batch_size=32, validation_split=0.2, callbacks=[early_stopping])
# Train the Decision Tree model
X_train_flat = X_train.reshape(X_train.shape[0], -1)
X_test_flat = X_test.reshape(X_test.shape[0], -1)
dt_model = DecisionTreeRegressor()
dt_model.fit(X_train_flat, y_train)
# Train the Random Forest model
rf_model = RandomForestRegressor(n_estimators=100)
rf_model.fit(X_train_flat, y_train)
# Train the XGBoost model
xgb_model = XGBRegressor(n_estimators=100)
xgb_model.fit(X_train_flat, y_train)
Epoch 1/100
/Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/rnn.py:204: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs)
29/29 ━━━━━━━━━━━━━━━━━━━━ 4s 62ms/step - loss: 0.1129 - val_loss: 0.0077 Epoch 2/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0091 - val_loss: 0.0017 Epoch 3/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0067 - val_loss: 0.0015 Epoch 4/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 28ms/step - loss: 0.0057 - val_loss: 0.0015 Epoch 5/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0060 - val_loss: 0.0014 Epoch 6/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0055 - val_loss: 0.0014 Epoch 7/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 28ms/step - loss: 0.0057 - val_loss: 0.0014 Epoch 8/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0049 - val_loss: 0.0013 Epoch 9/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0053 - val_loss: 0.0016 Epoch 10/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0056 - val_loss: 0.0014 Epoch 11/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 28ms/step - loss: 0.0042 - val_loss: 0.0012 Epoch 12/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 29ms/step - loss: 0.0045 - val_loss: 0.0011 Epoch 13/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 29ms/step - loss: 0.0046 - val_loss: 0.0012 Epoch 14/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0041 - val_loss: 0.0020 Epoch 15/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 28ms/step - loss: 0.0052 - val_loss: 0.0011 Epoch 16/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 29ms/step - loss: 0.0046 - val_loss: 0.0010 Epoch 17/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 28ms/step - loss: 0.0046 - val_loss: 9.7463e-04 Epoch 18/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 29ms/step - loss: 0.0041 - val_loss: 9.4887e-04 Epoch 19/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0039 - val_loss: 9.2786e-04 Epoch 20/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0040 - val_loss: 9.9084e-04 Epoch 21/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0037 - val_loss: 0.0013 Epoch 22/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0042 - val_loss: 8.3878e-04 Epoch 23/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0036 - val_loss: 0.0011 Epoch 24/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 28ms/step - loss: 0.0043 - val_loss: 8.5105e-04 Epoch 25/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0034 - val_loss: 8.2337e-04 Epoch 26/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 29ms/step - loss: 0.0035 - val_loss: 8.7991e-04 Epoch 27/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 29ms/step - loss: 0.0036 - val_loss: 7.6690e-04 Epoch 28/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0035 - val_loss: 7.4809e-04 Epoch 29/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 29ms/step - loss: 0.0035 - val_loss: 7.3307e-04 Epoch 30/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 43ms/step - loss: 0.0030 - val_loss: 7.0966e-04 Epoch 31/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0031 - val_loss: 7.1612e-04 Epoch 32/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0033 - val_loss: 6.9956e-04 Epoch 33/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0027 - val_loss: 6.8280e-04 Epoch 34/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0034 - val_loss: 9.0121e-04 Epoch 35/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0030 - val_loss: 6.8257e-04 Epoch 36/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 28ms/step - loss: 0.0032 - val_loss: 6.5343e-04 Epoch 37/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0029 - val_loss: 6.5707e-04 Epoch 38/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0027 - val_loss: 6.4737e-04 Epoch 39/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0026 - val_loss: 6.1749e-04 Epoch 40/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0027 - val_loss: 6.1069e-04 Epoch 41/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0028 - val_loss: 6.6093e-04 Epoch 42/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0030 - val_loss: 6.1853e-04 Epoch 43/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0026 - val_loss: 5.9621e-04 Epoch 44/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0025 - val_loss: 5.9513e-04 Epoch 45/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 28ms/step - loss: 0.0027 - val_loss: 6.4147e-04 Epoch 46/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 29ms/step - loss: 0.0027 - val_loss: 5.9238e-04 Epoch 47/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 46ms/step - loss: 0.0027 - val_loss: 6.0355e-04 Epoch 48/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0025 - val_loss: 6.6347e-04 Epoch 49/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0022 - val_loss: 5.7223e-04 Epoch 50/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0024 - val_loss: 5.8388e-04 Epoch 51/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0023 - val_loss: 6.5767e-04 Epoch 52/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0024 - val_loss: 5.5513e-04 Epoch 53/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 28ms/step - loss: 0.0027 - val_loss: 5.8394e-04 Epoch 54/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0024 - val_loss: 5.8123e-04 Epoch 55/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0026 - val_loss: 5.7511e-04 Epoch 56/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0022 - val_loss: 6.6506e-04 Epoch 57/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0025 - val_loss: 5.2967e-04 Epoch 58/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 28ms/step - loss: 0.0022 - val_loss: 5.2804e-04 Epoch 59/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 28ms/step - loss: 0.0024 - val_loss: 5.2832e-04 Epoch 60/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 42ms/step - loss: 0.0023 - val_loss: 5.2724e-04 Epoch 61/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0023 - val_loss: 5.1121e-04 Epoch 62/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 29ms/step - loss: 0.0024 - val_loss: 5.4809e-04 Epoch 63/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0021 - val_loss: 5.4911e-04 Epoch 64/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0022 - val_loss: 5.2863e-04 Epoch 65/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0023 - val_loss: 5.2508e-04 Epoch 66/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0019 - val_loss: 5.0427e-04 Epoch 67/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 29ms/step - loss: 0.0019 - val_loss: 5.1283e-04 Epoch 68/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0019 - val_loss: 5.0499e-04 Epoch 69/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0019 - val_loss: 5.0702e-04 Epoch 70/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0021 - val_loss: 5.1519e-04 Epoch 71/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 28ms/step - loss: 0.0022 - val_loss: 5.1998e-04 Epoch 72/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0025 - val_loss: 5.1498e-04 Epoch 73/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0018 - val_loss: 5.2600e-04 Epoch 74/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0019 - val_loss: 4.8988e-04 Epoch 75/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0023 - val_loss: 4.6790e-04 Epoch 76/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0020 - val_loss: 5.4461e-04 Epoch 77/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0020 - val_loss: 4.7722e-04 Epoch 78/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0017 - val_loss: 6.3189e-04 Epoch 79/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 29ms/step - loss: 0.0018 - val_loss: 4.7583e-04 Epoch 80/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 28ms/step - loss: 0.0018 - val_loss: 5.2540e-04 Epoch 81/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0019 - val_loss: 4.5566e-04 Epoch 82/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0016 - val_loss: 4.7436e-04 Epoch 83/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0019 - val_loss: 4.5243e-04 Epoch 84/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0018 - val_loss: 4.4452e-04 Epoch 85/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0015 - val_loss: 4.4281e-04 Epoch 86/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0018 - val_loss: 4.4198e-04 Epoch 87/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0016 - val_loss: 4.3913e-04 Epoch 88/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0017 - val_loss: 4.3848e-04 Epoch 89/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 29ms/step - loss: 0.0017 - val_loss: 5.2179e-04 Epoch 90/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0017 - val_loss: 4.3058e-04 Epoch 91/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0017 - val_loss: 5.1448e-04 Epoch 92/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0017 - val_loss: 5.2440e-04 Epoch 93/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0016 - val_loss: 4.2862e-04 Epoch 94/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0016 - val_loss: 5.1436e-04 Epoch 95/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 53ms/step - loss: 0.0016 - val_loss: 4.2072e-04 Epoch 96/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0017 - val_loss: 4.6973e-04 Epoch 97/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 29ms/step - loss: 0.0017 - val_loss: 4.4807e-04 Epoch 98/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 27ms/step - loss: 0.0015 - val_loss: 4.3789e-04 Epoch 99/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0015 - val_loss: 4.1691e-04 Epoch 100/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0015 - val_loss: 4.2667e-04 Epoch 1/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 3s 42ms/step - loss: 0.1215 - val_loss: 0.0019 Epoch 2/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0098 - val_loss: 0.0020 Epoch 3/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 29ms/step - loss: 0.0055 - val_loss: 7.9322e-04 Epoch 4/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0055 - val_loss: 6.8789e-04 Epoch 5/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0047 - val_loss: 6.3169e-04 Epoch 6/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0041 - val_loss: 6.3561e-04 Epoch 7/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0037 - val_loss: 6.1968e-04 Epoch 8/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0040 - val_loss: 6.9289e-04 Epoch 9/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0038 - val_loss: 6.4962e-04 Epoch 10/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0041 - val_loss: 6.4280e-04 Epoch 1/100
/Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/bidirectional.py:107: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs)
29/29 ━━━━━━━━━━━━━━━━━━━━ 10s 83ms/step - loss: 0.1057 - val_loss: 0.0072 Epoch 2/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 69ms/step - loss: 0.0110 - val_loss: 0.0022 Epoch 3/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0070 - val_loss: 0.0020 Epoch 4/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 65ms/step - loss: 0.0056 - val_loss: 0.0017 Epoch 5/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 66ms/step - loss: 0.0051 - val_loss: 0.0018 Epoch 6/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 66ms/step - loss: 0.0041 - val_loss: 0.0011 Epoch 7/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 85ms/step - loss: 0.0044 - val_loss: 0.0015 Epoch 8/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 66ms/step - loss: 0.0042 - val_loss: 0.0010 Epoch 9/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 61ms/step - loss: 0.0037 - val_loss: 9.7892e-04 Epoch 10/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 58ms/step - loss: 0.0038 - val_loss: 9.2306e-04 Epoch 1/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 5s 55ms/step - loss: 0.1149 - val_loss: 0.0027 Epoch 2/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 47ms/step - loss: 0.0098 - val_loss: 0.0015 Epoch 3/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 1s 50ms/step - loss: 0.0063 - val_loss: 0.0011 Epoch 4/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 0.0045 - val_loss: 0.0012 Epoch 5/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 68ms/step - loss: 0.0041 - val_loss: 5.8485e-04 Epoch 6/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 63ms/step - loss: 0.0034 - val_loss: 8.3915e-04 Epoch 7/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 68ms/step - loss: 0.0037 - val_loss: 7.9575e-04 Epoch 8/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 86ms/step - loss: 0.0030 - val_loss: 5.1530e-04 Epoch 9/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 67ms/step - loss: 0.0033 - val_loss: 4.7594e-04 Epoch 10/100 29/29 ━━━━━━━━━━━━━━━━━━━━ 2s 66ms/step - loss: 0.0026 - val_loss: 5.8443e-04
Out[ ]:
XGBRegressor(base_score=None, booster=None, callbacks=None,
colsample_bylevel=None, colsample_bynode=None,
colsample_bytree=None, device=None, early_stopping_rounds=None,
enable_categorical=False, eval_metric=None, feature_types=None,
gamma=None, grow_policy=None, importance_type=None,
interaction_constraints=None, learning_rate=None, max_bin=None,
max_cat_threshold=None, max_cat_to_onehot=None,
max_delta_step=None, max_depth=None, max_leaves=None,
min_child_weight=None, missing=nan, monotone_constraints=None,
multi_strategy=None, n_estimators=100, n_jobs=None,
num_parallel_tree=None, random_state=None, ...)In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
XGBRegressor(base_score=None, booster=None, callbacks=None,
colsample_bylevel=None, colsample_bynode=None,
colsample_bytree=None, device=None, early_stopping_rounds=None,
enable_categorical=False, eval_metric=None, feature_types=None,
gamma=None, grow_policy=None, importance_type=None,
interaction_constraints=None, learning_rate=None, max_bin=None,
max_cat_threshold=None, max_cat_to_onehot=None,
max_delta_step=None, max_depth=None, max_leaves=None,
min_child_weight=None, missing=nan, monotone_constraints=None,
multi_strategy=None, n_estimators=100, n_jobs=None,
num_parallel_tree=None, random_state=None, ...)In [ ]:
# Prepare date range for future predictions
last_date = sab_stock['Date'].iloc[-1]
future_dates = pd.date_range(start=last_date, periods=91, inclusive='right')
# Function to plot future predictions
def plot_future_predictions(future_predictions, model_name):
plt.figure(figsize=(14, 7))
plt.plot(sab_stock['Date'], sab_stock['Close'], color='blue', label='Actual SAB Stock Price')
plt.plot(future_dates, future_predictions, color='red', label=f'Predicted SAB Stock Price ({model_name})')
plt.xlabel('Date')
plt.ylabel('Price')
plt.title(f'SAB Stock Price Prediction for Next 90 Days using {model_name}')
plt.legend()
plt.show()
# Plot future predictions for each model
plot_future_predictions(future_predictions_lstm, 'LSTM')
plot_future_predictions(future_predictions_gru, 'GRU')
plot_future_predictions(future_predictions_bi_lstm, 'Bidirectional LSTM')
plot_future_predictions(future_predictions_bi_gru, 'Bidirectional GRU')
plot_future_predictions(future_predictions_dt, 'Decision Tree')
plot_future_predictions(future_predictions_rf, 'Random Forest')
plot_future_predictions(future_predictions_xgb, 'XGBoost')
In [ ]:
# Display first few rows of each dataset
sab_stock_2018_2022_head = sab_stock_2018_2022.head()
sab_stock_2023_head = sab_stock_2023.head()
vn_index_2018_2023_head = vn_index_2018_2023.head()
(sab_stock_2018_2022_head, sab_stock_2023_head, vn_index_2018_2023_head)
# Display the structure of each dataset
sab_stock_2018_2022.info(), sab_stock_2023.info(), vn_index_2018_2023.info()
# Display first few rows of each dataset
sab_stock_2018_2022_head, sab_stock_2023_head, vn_index_2018_2023_head
<class 'pandas.core.frame.DataFrame'> RangeIndex: 1249 entries, 0 to 1248 Data columns (total 6 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 Date 1249 non-null datetime64[ns] 1 Close 1249 non-null float64 2 Open 1249 non-null float64 3 High 1249 non-null float64 4 Low 1249 non-null float64 5 Volume 1249 non-null int64 dtypes: datetime64[ns](1), float64(4), int64(1) memory usage: 58.7 KB <class 'pandas.core.frame.DataFrame'> RangeIndex: 249 entries, 0 to 248 Data columns (total 6 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 Date 249 non-null datetime64[ns] 1 Close 249 non-null float64 2 Open 249 non-null float64 3 High 249 non-null float64 4 Low 249 non-null float64 5 Volume 249 non-null int64 dtypes: datetime64[ns](1), float64(4), int64(1) memory usage: 11.8 KB <class 'pandas.core.frame.DataFrame'> RangeIndex: 1249 entries, 0 to 1248 Data columns (total 6 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 Date 1249 non-null datetime64[ns] 1 Close 1249 non-null float64 2 Open 1249 non-null float64 3 High 1249 non-null float64 4 Low 1249 non-null float64 5 Volume 1249 non-null int64 dtypes: datetime64[ns](1), float64(4), int64(1) memory usage: 58.7 KB
Out[ ]:
( Date Close Open High Low Volume
0 2018-01-02 126.15 127.00 127.35 124.95 116190
1 2018-01-03 132.50 126.15 132.50 126.15 231500
2 2018-01-04 133.75 134.00 134.35 131.50 238800
3 2018-01-05 132.15 134.00 134.25 131.55 131870
4 2018-01-08 130.50 131.50 132.00 129.50 115690,
Date Close Open High Low Volume
0 2023-01-03 84.50 85.55 86.75 84.10 121700
1 2023-01-04 85.75 84.60 87.00 84.50 116900
2 2023-01-05 87.10 86.00 91.75 85.75 255300
3 2023-01-06 90.35 88.60 91.30 87.10 130100
4 2023-01-09 89.80 89.75 91.00 87.90 46900,
Date Close Open High Low Volume
0 2018-01-02 995.77 986.05 996.18 984.24 172887390
1 2018-01-03 1005.67 999.86 1010.21 995.77 212432620
2 2018-01-04 1019.75 1009.37 1019.75 1005.67 235169670
3 2018-01-05 1012.65 1020.34 1020.60 1010.65 265519370
4 2018-01-08 1022.90 1011.36 1022.90 1004.89 234755510)
In [ ]:
# Combine SAB stock price data
sab_stock = pd.concat([sab_stock_2018_2022, sab_stock_2023])
sab_stock.sort_values('Date', inplace=True)
sab_stock.reset_index(drop=True, inplace=True)
# Function to process data for prediction
def process_data(data, n_steps=60):
data = data[['Close']].values
scaler = MinMaxScaler(feature_range=(0, 1))
scaled_data = scaler.fit_transform(data)
X, y = [], []
for i in range(n_steps, len(scaled_data)):
X.append(scaled_data[i-n_steps:i, 0])
y.append(scaled_data[i, 0])
X, y = np.array(X), np.array(y)
X = np.reshape(X, (X.shape[0], X.shape[1], 1))
return X, y, scaler
# Prepare the data
n_steps = 60
X_sab, y_sab, scaler_sab = process_data(sab_stock, n_steps)
X_vn, y_vn, scaler_vn = process_data(vn_index_2018_2023, n_steps)
# Train-Test Split
split = int(len(X_sab) * 0.8)
X_train_sab, X_test_sab = X_sab[:split], X_sab[split:]
y_train_sab, y_test_sab = y_sab[:split], y_sab[split:]
split = int(len(X_vn) * 0.8)
X_train_vn, X_test_vn = X_vn[:split], X_vn[split:]
y_train_vn, y_test_vn = y_vn[:split], y_vn[split:]
In [ ]:
# Combine SAB stock price data
sab_stock = pd.concat([sab_stock_2018_2022, sab_stock_2023])
sab_stock.sort_values('Date', inplace=True)
sab_stock.reset_index(drop=True, inplace=True)
# Function to plot the resampled closing price over a given period
def plot_resampled(data, title):
data['Date'] = pd.to_datetime(data['Date'])
data.set_index('Date', inplace=True)
data_resampled = data.resample('D').mean()
plt.figure(figsize=(12, 6))
plt.plot(data_resampled.index, data_resampled['Close'], color='blue')
plt.title(title)
plt.xlabel('Date')
plt.ylabel('Closing Price')
plt.grid(True)
plt.show()
# Resample SAB stock data for the period 2018-2022 and plot
plot_resampled(sab_stock_2018_2022, 'Closed price resampled over day for mean (SAB 2018-2022)')
# Resample VN-Index data for the period 2018-2022 and plot
plot_resampled(vn_index_2018_2023[vn_index_2018_2023['Date'] < '2023-01-01'], 'Closed price resampled over day for mean (VN-Index 2018-2022)')
In [ ]:
# Print the first few rows of both dataframes to check if they contain the 'Date' column
print(sab_stock_2018_2022.head())
print(sab_stock_2023.head())
Close Open High Low Volume
Date
2018-01-02 126.15 127.00 127.35 124.95 116190
2018-01-03 132.50 126.15 132.50 126.15 231500
2018-01-04 133.75 134.00 134.35 131.50 238800
2018-01-05 132.15 134.00 134.25 131.55 131870
2018-01-08 130.50 131.50 132.00 129.50 115690
Date Close Open High Low Volume
0 2023-01-03 84.50 85.55 86.75 84.10 121700
1 2023-01-04 85.75 84.60 87.00 84.50 116900
2 2023-01-05 87.10 86.00 91.75 85.75 255300
3 2023-01-06 90.35 88.60 91.30 87.10 130100
4 2023-01-09 89.80 89.75 91.00 87.90 46900
In [ ]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import LSTM, Dropout, Dense
# Function to process data for prediction
def process_data(data, n_steps=60):
data = data[['Close']].values
scaler = MinMaxScaler(feature_range=(0, 1))
scaled_data = scaler.fit_transform(data)
X, y = [], []
for i in range(n_steps, len(scaled_data)):
X.append(scaled_data[i-n_steps:i, 0])
y.append(scaled_data[i, 0])
X, y = np.array(X), np.array(y)
X = np.reshape(X, (X.shape[0], X.shape[1], 1))
return X, y, scaler
# Function to create LSTM model
def create_lstm_model(input_shape):
model = Sequential()
model.add(LSTM(units=50, return_sequences=True, input_shape=input_shape))
model.add(Dropout(0.2))
model.add(LSTM(units=50, return_sequences=True))
model.add(Dropout(0.2))
model.add(LSTM(units=50))
model.add(Dropout(0.2))
model.add(Dense(units=1))
model.compile(optimizer='adam', loss='mean_squared_error')
return model
# Function to predict the future stock prices
def predict_future(model, data, scaler, n_future=90):
predictions = []
last_data = data[-1]
for _ in range(n_future):
prediction = model.predict(last_data.reshape(1, last_data.shape[0], 1))
predictions.append(prediction[0, 0])
last_data = np.append(last_data[1:], prediction)
last_data = last_data.reshape(-1, 1)
predictions = scaler.inverse_transform(np.array(predictions).reshape(-1, 1))
return predictions
# Function to plot the future predictions
def plot_future_predictions(actual, future_pred, title):
plt.figure(figsize=(12, 6))
plt.plot(actual, color='blue', label='Actual Stock Price')
future_dates = pd.date_range(start=actual.index[-1], periods=len(future_pred) + 1)
plt.plot(future_dates[1:], future_pred, color='red', label='Future Predictions')
plt.title(title)
plt.xlabel('Date')
plt.ylabel('Stock Price')
plt.legend()
plt.grid(True)
plt.show()
# Combine SAB stock price data
sab_stock_2018_2022['Date'] = pd.to_datetime(sab_stock_2018_2022.index)
sab_stock = pd.concat([sab_stock_2018_2022, sab_stock_2023], ignore_index=True)
sab_stock.sort_values('Date', inplace=True)
sab_stock.reset_index(drop=True, inplace=True)
# Ensure all date values are valid
sab_stock['Date'] = pd.to_datetime(sab_stock['Date'], errors='coerce')
sab_stock.dropna(subset=['Date'], inplace=True)
# Prepare the data
n_steps = 60
X_sab, y_sab, scaler_sab = process_data(sab_stock, n_steps)
X_vn, y_vn, scaler_vn = process_data(vn_index_2018_2023, n_steps)
# Train-Test Split
split = int(len(X_sab) * 0.8)
X_train_sab, X_test_sab = X_sab[:split], X_sab[split:]
y_train_sab, y_test_sab = y_sab[:split], y_sab[split:]
split = int(len(X_vn) * 0.8)
X_train_vn, X_test_vn = X_vn[:split], X_vn[split:]
y_train_vn, y_test_vn = y_vn[:split], y_vn[split:]
# Create and train the model for SAB's stock price
lstm_model_sab = create_lstm_model((X_train_sab.shape[1], 1))
lstm_model_sab.fit(X_train_sab, y_train_sab, epochs=100, batch_size=32)
# Create and train the model for VN-Index
lstm_model_vn = create_lstm_model((X_train_vn.shape[1], 1))
lstm_model_vn.fit(X_train_vn, y_train_vn, epochs=100, batch_size=32)
# Predict the next 90 days for VN-Index and plot
future_predictions_vn = predict_future(lstm_model_vn, X_test_vn, scaler_vn)
plot_future_predictions(vn_index_2018_2023.set_index('Date')['Close'], future_predictions_vn, 'Predicting Stock Time series 2024 (VN-Index)')
# Predict the next 90 days for SAB and plot
future_predictions_sab = predict_future(lstm_model_sab, X_test_sab, scaler_sab)
plot_future_predictions(sab_stock.set_index('Date')['Close'], future_predictions_sab, 'Predicting Stock Time series 2024 (SAB)')
Epoch 1/100
/Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/rnn.py:204: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs)
36/36 ━━━━━━━━━━━━━━━━━━━━ 7s 36ms/step - loss: 0.0772 Epoch 2/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0097 Epoch 3/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0058 Epoch 4/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0058 Epoch 5/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0053 Epoch 6/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0049 Epoch 7/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0049 Epoch 8/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0059 Epoch 9/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0051 Epoch 10/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0050 Epoch 11/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0046 Epoch 12/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0040 Epoch 13/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0039 Epoch 14/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 47ms/step - loss: 0.0040 Epoch 15/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0044 Epoch 16/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0040 Epoch 17/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0037 Epoch 18/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0040 Epoch 19/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0036 Epoch 20/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0035 Epoch 21/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0035 Epoch 22/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0042 Epoch 23/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0034 Epoch 24/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0029 Epoch 25/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0030 Epoch 26/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0036 Epoch 27/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0030 Epoch 28/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0031 Epoch 29/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0030 Epoch 30/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0027 Epoch 31/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0031 Epoch 32/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0029 Epoch 33/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0030 Epoch 34/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0029 Epoch 35/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0027 Epoch 36/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0027 Epoch 37/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0026 Epoch 38/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0028 Epoch 39/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0029 Epoch 40/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 47ms/step - loss: 0.0025 Epoch 41/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0028 Epoch 42/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0022 Epoch 43/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0023 Epoch 44/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0023 Epoch 45/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0022 Epoch 46/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0021 Epoch 47/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0025 Epoch 48/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0024 Epoch 49/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0021 Epoch 50/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0026 Epoch 51/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0021 Epoch 52/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0020 Epoch 53/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0021 Epoch 54/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0020 Epoch 55/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0019 Epoch 56/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0025 Epoch 57/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0020 Epoch 58/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0020 Epoch 59/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0018 Epoch 60/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0016 Epoch 61/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0019 Epoch 62/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0019 Epoch 63/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0018 Epoch 64/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0016 Epoch 65/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0017 Epoch 66/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0023 Epoch 67/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0019 Epoch 68/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0017 Epoch 69/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0015 Epoch 70/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0017 Epoch 71/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0017 Epoch 72/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0016 Epoch 73/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0016 Epoch 74/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0015 Epoch 75/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0015 Epoch 76/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0015 Epoch 77/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0016 Epoch 78/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0017 Epoch 79/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0014 Epoch 80/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0016 Epoch 81/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0015 Epoch 82/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0015 Epoch 83/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0014 Epoch 84/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0014 Epoch 85/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0014 Epoch 86/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0014 Epoch 87/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0014 Epoch 88/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 46ms/step - loss: 0.0013 Epoch 89/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0014 Epoch 90/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 42ms/step - loss: 0.0013 Epoch 91/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0014 Epoch 92/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0013 Epoch 93/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0014 Epoch 94/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0013 Epoch 95/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0012 Epoch 96/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0014 Epoch 97/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 47ms/step - loss: 0.0013 Epoch 98/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0011 Epoch 99/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0012 Epoch 100/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0013 Epoch 1/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 3s 32ms/step - loss: 0.0916 Epoch 2/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0079 Epoch 3/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0056 Epoch 4/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0052 Epoch 5/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0047 Epoch 6/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0043 Epoch 7/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0043 Epoch 8/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0045 Epoch 9/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0044 Epoch 10/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0036 Epoch 11/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0033 Epoch 12/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0043 Epoch 13/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 47ms/step - loss: 0.0048 Epoch 14/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0036 Epoch 15/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0038 Epoch 16/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0033 Epoch 17/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0033 Epoch 18/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0031 Epoch 19/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0042 Epoch 20/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0032 Epoch 21/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0031 Epoch 22/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0029 Epoch 23/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0028 Epoch 24/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0027 Epoch 25/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0029 Epoch 26/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 46ms/step - loss: 0.0027 Epoch 27/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0026 Epoch 28/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0025 Epoch 29/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0025 Epoch 30/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0023 Epoch 31/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0024 Epoch 32/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0022 Epoch 33/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0025 Epoch 34/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0026 Epoch 35/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0024 Epoch 36/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0032 Epoch 37/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0026 Epoch 38/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0023 Epoch 39/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0021 Epoch 40/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0018 Epoch 41/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0024 Epoch 42/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0022 Epoch 43/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0021 Epoch 44/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0018 Epoch 45/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0019 Epoch 46/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0018 Epoch 47/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0021 Epoch 48/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0022 Epoch 49/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0020 Epoch 50/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0018 Epoch 51/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0016 Epoch 52/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0021 Epoch 53/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0017 Epoch 54/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 47ms/step - loss: 0.0016 Epoch 55/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0020 Epoch 56/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0020 Epoch 57/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0020 Epoch 58/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0019 Epoch 59/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0015 Epoch 60/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0019 Epoch 61/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0014 Epoch 62/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0016 Epoch 63/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0016 Epoch 64/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0017 Epoch 65/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0021 Epoch 66/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0019 Epoch 67/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0014 Epoch 68/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0014 Epoch 69/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0015 Epoch 70/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0016 Epoch 71/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0018 Epoch 72/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0012 Epoch 73/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0012 Epoch 74/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0014 Epoch 75/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0014 Epoch 76/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0013 Epoch 77/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0014 Epoch 78/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0013 Epoch 79/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0014 Epoch 80/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0012 Epoch 81/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0015 Epoch 82/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0014 Epoch 83/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0013 Epoch 84/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0013 Epoch 85/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0015 Epoch 86/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0011 Epoch 87/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0012 Epoch 88/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0013 Epoch 89/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0013 Epoch 90/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0012 Epoch 91/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0012 Epoch 92/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0011 Epoch 93/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0012 Epoch 94/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0012 Epoch 95/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0011 Epoch 96/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0011 Epoch 97/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0011 Epoch 98/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0010 Epoch 99/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0012 Epoch 100/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 30ms/step - loss: 0.0012 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 199ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 28ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 26ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 237ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step
In [ ]:
# tranh cai nay ra!
def plot_predictions(actual, future_pred, title):
plt.figure(figsize=(12, 6))
plt.plot(actual, color='red', label='Actual')
plt.plot(np.arange(len(actual), len(actual) + len(future_pred)), future_pred, color='cyan', label='Predicted')
plt.title(title)
plt.xlabel('Time')
plt.ylabel('Price')
plt.legend()
plt.grid(True)
plt.show()
plot_predictions(vn_index['Close'], future_predictions_vn, 'Predicting Stock Time Series 2024 (VN-Index)')
plot_predictions(sab_stock['Close'], future_predictions_sab, 'Predicting Stock Time Series 2024 (SAB)')
In [ ]:
import xgboost as xgb
# Data preparation
vn_index = pd.read_excel('VN-Index-2018-2023.xlsx')
sab_stock_2018_2022 = pd.read_excel('SAB stock price 2018-2022.xlsx')
sab_stock_2023 = pd.read_excel('SAB stock price 2023.xlsx')
sab_stock = pd.concat([sab_stock_2018_2022, sab_stock_2023])
sab_stock.sort_values('Date', inplace=True)
sab_stock.reset_index(drop=True, inplace=True)
def preprocess_data(data, column='Close'):
data = data[[column]].values
scaler = MinMaxScaler(feature_range=(0, 1))
scaled_data = scaler.fit_transform(data)
X, y = [], []
for i in range(60, len(scaled_data)):
X.append(scaled_data[i-60:i, 0])
y.append(scaled_data[i, 0])
X, y = np.array(X), np.array(y)
X = np.reshape(X, (X.shape[0], X.shape[1], 1))
return X, y, scaler
# Preprocess data
X_vn_rnn, y_vn, vn_index_scaler = preprocess_data(vn_index)
X_sab_rnn, y_sab, sab_stock_scaler = preprocess_data(sab_stock)
X_vn_flat = X_vn_rnn.reshape(X_vn_rnn.shape[0], -1)
X_sab_flat = X_sab_rnn.reshape(X_sab_rnn.shape[0], -1)
# Define RNN models
def create_lstm_model(input_shape):
model = Sequential()
model.add(LSTM(units=50, return_sequences=True, input_shape=input_shape))
model.add(Dropout(0.2))
model.add(LSTM(units=50))
model.add(Dropout(0.2))
model.add(Dense(units=1))
model.compile(optimizer='adam', loss='mean_squared_error')
return model
def create_gru_model(input_shape):
model = Sequential()
model.add(GRU(units=50, return_sequences=True, input_shape=input_shape))
model.add(Dropout(0.2))
model.add(GRU(units=50))
model.add(Dropout(0.2))
model.add(Dense(units=1))
model.compile(optimizer='adam', loss='mean_squared_error')
return model
def create_bidirectional_lstm_model(input_shape):
model = Sequential()
model.add(Bidirectional(LSTM(units=50, return_sequences=True), input_shape=input_shape))
model.add(Dropout(0.2))
model.add(Bidirectional(LSTM(units=50)))
model.add(Dropout(0.2))
model.add(Dense(units=1))
model.compile(optimizer='adam', loss='mean_squared_error')
return model
def create_bidirectional_gru_model(input_shape):
model = Sequential()
model.add(Bidirectional(GRU(units=50, return_sequences=True), input_shape=input_shape))
model.add(Dropout(0.2))
model.add(Bidirectional(GRU(units=50)))
model.add(Dropout(0.2))
model.add(Dense(units=1))
model.compile(optimizer='adam', loss='mean_squared_error')
return model
# Define tree and ML models
tree_models = {
'Decision Tree': DecisionTreeRegressor(),
'Random Forest': RandomForestRegressor(),
'XGBoost': xgb.XGBRegressor()
}
ml_models = {
'SVM': SVR(),
'GMM': GaussianMixture(n_components=3),
'KMeans': KMeans(n_clusters=3)
}
# Train models
models = {
'LSTM': create_lstm_model((X_vn_rnn.shape[1], 1)),
'GRU': create_gru_model((X_vn_rnn.shape[1], 1)),
'Bidirectional LSTM': create_bidirectional_lstm_model((X_vn_rnn.shape[1], 1)),
'Bidirectional GRU': create_bidirectional_gru_model((X_vn_rnn.shape[1], 1))
}
for name, model in models.items():
model.fit(X_vn_rnn, y_vn, epochs=50, batch_size=32)
model.fit(X_sab_rnn, y_sab, epochs=50, batch_size=32)
for name, model in tree_models.items():
model.fit(X_vn_flat, y_vn)
model.fit(X_sab_flat, y_sab)
for name, model in ml_models.items():
model.fit(X_vn_flat, y_vn)
model.fit(X_sab_flat, y_sab)
def predict_rnn_future(model, data, scaler, days=180):
predictions = []
last_data = data[-60:].reshape((1, 60, 1)) # Ensure correct shape initially
for _ in range(days):
prediction = model.predict(last_data)
predictions.append(prediction[0, 0])
last_data = np.append(last_data[0][1:], prediction).reshape((1, 60, 1))
return scaler.inverse_transform(np.array(predictions).reshape(-1, 1))
def predict_tree_future(model, data, scaler, days=180):
future_predictions = []
last_data = data[-60:]
for _ in range(days):
prediction = model.predict(last_data.reshape(1, -1))
future_predictions.append(prediction[0])
last_data = np.append(last_data[1:], prediction)
return scaler.inverse_transform(np.array(future_predictions).reshape(-1, 1))
def predict_ml_future(model, data, scaler, days=180):
future_predictions = []
last_data = data[-60:]
for _ in range(days):
prediction = model.predict(last_data.reshape(1, -1))
future_predictions.append(prediction[0])
last_data = np.append(last_data[1:], prediction)
return scaler.inverse_transform(np.array(future_predictions).reshape(-1, 1))
/Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/rnn.py:204: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs) /Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/bidirectional.py:107: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs)
Epoch 1/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 3s 22ms/step - loss: 0.0918 Epoch 2/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0059 Epoch 3/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 26ms/step - loss: 0.0043 Epoch 4/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0035 Epoch 5/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0047 Epoch 6/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0043 Epoch 7/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0039 Epoch 8/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0035 Epoch 9/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0035 Epoch 10/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0035 Epoch 11/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0037 Epoch 12/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0046 Epoch 13/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0033 Epoch 14/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0040 Epoch 15/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0029 Epoch 16/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0028 Epoch 17/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0034 Epoch 18/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0033 Epoch 19/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0030 Epoch 20/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0031 Epoch 21/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0029 Epoch 22/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0027 Epoch 23/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0028 Epoch 24/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0028 Epoch 25/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0032 Epoch 26/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0025 Epoch 27/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0023 Epoch 28/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0026 Epoch 29/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0031 Epoch 30/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0020 Epoch 31/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0022 Epoch 32/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 25ms/step - loss: 0.0022 Epoch 33/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0021 Epoch 34/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0023 Epoch 35/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0021 Epoch 36/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 19ms/step - loss: 0.0020 Epoch 37/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0021 Epoch 38/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0021 Epoch 39/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0018 Epoch 40/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0020 Epoch 41/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0020 Epoch 42/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0019 Epoch 43/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0019 Epoch 44/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 19ms/step - loss: 0.0017 Epoch 45/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0018 Epoch 46/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0021 Epoch 47/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0019 Epoch 48/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0019 Epoch 49/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0022 Epoch 50/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0016 Epoch 1/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0018 Epoch 2/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0020 Epoch 3/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0017 Epoch 4/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 19ms/step - loss: 0.0016 Epoch 5/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0018 Epoch 6/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0017 Epoch 7/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0016 Epoch 8/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0018 Epoch 9/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0016 Epoch 10/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0015 Epoch 11/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0014 Epoch 12/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0014 Epoch 13/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0013 Epoch 14/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0015 Epoch 15/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0014 Epoch 16/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0014 Epoch 17/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0016 Epoch 18/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0015 Epoch 19/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0013 Epoch 20/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0012 Epoch 21/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0013 Epoch 22/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0012 Epoch 23/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0011 Epoch 24/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0014 Epoch 25/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0011 Epoch 26/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0013 Epoch 27/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0013 Epoch 28/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0012 Epoch 29/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0012 Epoch 30/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0011 Epoch 31/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0012 Epoch 32/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0010 Epoch 33/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0011 Epoch 34/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0012 Epoch 35/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0012 Epoch 36/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0012 Epoch 37/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0012 Epoch 38/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0011 Epoch 39/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0011 Epoch 40/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0012 Epoch 41/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0014 Epoch 42/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0011 Epoch 43/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0011 Epoch 44/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0011 Epoch 45/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0010 Epoch 46/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0011 Epoch 47/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 0.0010 Epoch 48/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0010 Epoch 49/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 21ms/step - loss: 9.9906e-04 Epoch 50/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 20ms/step - loss: 0.0011 Epoch 1/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 3s 22ms/step - loss: 0.0747 Epoch 2/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0061 Epoch 3/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0043 Epoch 4/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0037 Epoch 5/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0035 Epoch 6/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0037 Epoch 7/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0033 Epoch 8/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0034 Epoch 9/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0028 Epoch 10/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0035 Epoch 11/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0024 Epoch 12/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0025 Epoch 13/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0027 Epoch 14/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0027 Epoch 15/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0032 Epoch 16/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0028 Epoch 17/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0024 Epoch 18/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0023 Epoch 19/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0025 Epoch 20/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0024 Epoch 21/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0027 Epoch 22/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0021 Epoch 23/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0025 Epoch 24/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0022 Epoch 25/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0021 Epoch 26/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0022 Epoch 27/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0019 Epoch 28/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0019 Epoch 29/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0019 Epoch 30/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0020 Epoch 31/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0021 Epoch 32/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 0.0019 Epoch 33/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0016 Epoch 34/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0018 Epoch 35/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0020 Epoch 36/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0016 Epoch 37/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0017 Epoch 38/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0015 Epoch 39/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0018 Epoch 40/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0016 Epoch 41/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0015 Epoch 42/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0017 Epoch 43/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0014 Epoch 44/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0015 Epoch 45/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0017 Epoch 46/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0015 Epoch 47/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0016 Epoch 48/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0015 Epoch 49/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0016 Epoch 50/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0016 Epoch 1/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0015 Epoch 2/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0012 Epoch 3/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0011 Epoch 4/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0012 Epoch 5/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0012 Epoch 6/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0013 Epoch 7/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0012 Epoch 8/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0013 Epoch 9/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0011 Epoch 10/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0011 Epoch 11/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0011 Epoch 12/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0010 Epoch 13/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0011 Epoch 14/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0011 Epoch 15/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0010 Epoch 16/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0011 Epoch 17/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0010 Epoch 18/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0012 Epoch 19/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0011 Epoch 20/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0010 Epoch 21/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 9.3846e-04 Epoch 22/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0013 Epoch 23/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 9.0793e-04 Epoch 24/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 33ms/step - loss: 0.0011 Epoch 25/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 9.9681e-04 Epoch 26/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 9.4988e-04 Epoch 27/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0010 Epoch 28/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 0.0011 Epoch 29/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0010 Epoch 30/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 9.2840e-04 Epoch 31/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 9.7106e-04 Epoch 32/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 8.8376e-04 Epoch 33/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 9.5290e-04 Epoch 34/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 8.8359e-04 Epoch 35/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0010 Epoch 36/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 8.3872e-04 Epoch 37/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 9.8607e-04 Epoch 38/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 9.1203e-04 Epoch 39/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 8.5319e-04 Epoch 40/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0011 Epoch 41/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 8.1367e-04 Epoch 42/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 8.9816e-04 Epoch 43/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 9.1244e-04 Epoch 44/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0010 Epoch 45/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 8.2622e-04 Epoch 46/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 7.7208e-04 Epoch 47/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 0.0011 Epoch 48/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 24ms/step - loss: 8.5717e-04 Epoch 49/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 23ms/step - loss: 8.9186e-04 Epoch 50/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 22ms/step - loss: 9.1608e-04 Epoch 1/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 4s 28ms/step - loss: 0.0691 Epoch 2/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 28ms/step - loss: 0.0071 Epoch 3/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0042 Epoch 4/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0036 Epoch 5/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0031 Epoch 6/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0036 Epoch 7/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0039 Epoch 8/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0025 Epoch 9/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0026 Epoch 10/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0025 Epoch 11/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0024 Epoch 12/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0025 Epoch 13/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0027 Epoch 14/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0027 Epoch 15/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0024 Epoch 16/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0026 Epoch 17/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0021 Epoch 18/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0023 Epoch 19/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0019 Epoch 20/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 58ms/step - loss: 0.0019 Epoch 21/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0019 Epoch 22/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0019 Epoch 23/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0026 Epoch 24/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0023 Epoch 25/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0016 Epoch 26/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0020 Epoch 27/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0017 Epoch 28/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0017 Epoch 29/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0018 Epoch 30/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0015 Epoch 31/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0018 Epoch 32/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 43ms/step - loss: 0.0021 Epoch 33/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0016 Epoch 34/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0015 Epoch 35/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0018 Epoch 36/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 57ms/step - loss: 0.0015 Epoch 37/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0014 Epoch 38/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0021 Epoch 39/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0015 Epoch 40/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0019 Epoch 41/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0016 Epoch 42/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0014 Epoch 43/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0013 Epoch 44/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0014 Epoch 45/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0015 Epoch 46/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0013 Epoch 47/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0014 Epoch 48/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0016 Epoch 49/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0015 Epoch 50/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0015 Epoch 1/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0014 Epoch 2/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0013 Epoch 3/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0013 Epoch 4/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0012 Epoch 5/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0012 Epoch 6/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0013 Epoch 7/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 3s 56ms/step - loss: 0.0014 Epoch 8/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0012 Epoch 9/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0012 Epoch 10/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0013 Epoch 11/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0012 Epoch 12/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0014 Epoch 13/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 46ms/step - loss: 0.0012 Epoch 14/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0012 Epoch 15/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0013 Epoch 16/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0015 Epoch 17/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0011 Epoch 18/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0012 Epoch 19/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 46ms/step - loss: 0.0011 Epoch 20/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0011 Epoch 21/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0010 Epoch 22/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0012 Epoch 23/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0011 Epoch 24/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0011 Epoch 25/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0011 Epoch 26/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0010 Epoch 27/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 9.9736e-04 Epoch 28/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0010 Epoch 29/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 46ms/step - loss: 0.0010 Epoch 30/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0012 Epoch 31/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0011 Epoch 32/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0010 Epoch 33/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0012 Epoch 34/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0011 Epoch 35/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 9.8862e-04 Epoch 36/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0010 Epoch 37/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0011 Epoch 38/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0011 Epoch 39/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 54ms/step - loss: 9.5583e-04 Epoch 40/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 46ms/step - loss: 9.3544e-04 Epoch 41/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0011 Epoch 42/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 9.8899e-04 Epoch 43/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 9.8047e-04 Epoch 44/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0010 Epoch 45/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 9.9971e-04 Epoch 46/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0011 Epoch 47/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 9.4746e-04 Epoch 48/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 9.6338e-04 Epoch 49/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 9.5717e-04 Epoch 50/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0012 Epoch 1/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 4s 32ms/step - loss: 0.1420 Epoch 2/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0090 Epoch 3/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 43ms/step - loss: 0.0045 Epoch 4/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0035 Epoch 5/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0030 Epoch 6/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0027 Epoch 7/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0027 Epoch 8/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0023 Epoch 9/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0023 Epoch 10/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0025 Epoch 11/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0022 Epoch 12/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0022 Epoch 13/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0019 Epoch 14/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0017 Epoch 15/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0018 Epoch 16/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0019 Epoch 17/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0020 Epoch 18/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0020 Epoch 19/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0022 Epoch 20/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0017 Epoch 21/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0018 Epoch 22/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0017 Epoch 23/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 3s 70ms/step - loss: 0.0015 Epoch 24/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0016 Epoch 25/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0017 Epoch 26/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0018 Epoch 27/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0016 Epoch 28/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 53ms/step - loss: 0.0020 Epoch 29/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 55ms/step - loss: 0.0018 Epoch 30/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0014 Epoch 31/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0017 Epoch 32/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 62ms/step - loss: 0.0014 Epoch 33/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0017 Epoch 34/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0018 Epoch 35/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0014 Epoch 36/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0017 Epoch 37/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0014 Epoch 38/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0015 Epoch 39/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0014 Epoch 40/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0013 Epoch 41/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0015 Epoch 42/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0015 Epoch 43/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0015 Epoch 44/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0017 Epoch 45/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0016 Epoch 46/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0015 Epoch 47/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0012 Epoch 48/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0013 Epoch 49/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0014 Epoch 50/50 38/38 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0015 Epoch 1/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0013 Epoch 2/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0013 Epoch 3/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0014 Epoch 4/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0011 Epoch 5/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0012 Epoch 6/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0012 Epoch 7/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0010 Epoch 8/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0012 Epoch 9/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0011 Epoch 10/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0011 Epoch 11/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0011 Epoch 12/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0012 Epoch 13/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0012 Epoch 14/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 3s 61ms/step - loss: 0.0013 Epoch 15/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0011 Epoch 16/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0011 Epoch 17/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0011 Epoch 18/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0011 Epoch 19/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0013 Epoch 20/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0012 Epoch 21/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0012 Epoch 22/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0011 Epoch 23/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0012 Epoch 24/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0011 Epoch 25/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 9.0291e-04 Epoch 26/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 9.8647e-04 Epoch 27/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0010 Epoch 28/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0011 Epoch 29/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0010 Epoch 30/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0012 Epoch 31/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0012 Epoch 32/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 53ms/step - loss: 9.8793e-04 Epoch 33/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 3s 62ms/step - loss: 0.0010 Epoch 34/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 48ms/step - loss: 9.7363e-04 Epoch 35/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 3s 75ms/step - loss: 0.0010 Epoch 36/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 3s 57ms/step - loss: 9.1478e-04 Epoch 37/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0010 Epoch 38/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 0.0011 Epoch 39/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0011 Epoch 40/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0011 Epoch 41/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 8.8362e-04 Epoch 42/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 9.8945e-04 Epoch 43/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 8.7493e-04 Epoch 44/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 49ms/step - loss: 9.5512e-04 Epoch 45/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0011 Epoch 46/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 3s 56ms/step - loss: 8.9134e-04 Epoch 47/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 3s 69ms/step - loss: 0.0010 Epoch 48/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 8.8814e-04 Epoch 49/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 9.0514e-04 Epoch 50/50 45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0010
In [ ]:
def predict_rnn_future(model, data, scaler, days=180):
predictions = []
last_data = data[-60:, 0].reshape((60, 1)) # Use only the last 60 days and ensure correct shape
last_data = last_data.reshape((1, 60, 1)) # Ensure correct shape initially
for _ in range(days):
prediction = model.predict(last_data)
predictions.append(prediction[0, 0])
last_data = np.append(last_data[:, 1:, :], np.reshape(prediction, (1, 1, 1)), axis=1)
return scaler.inverse_transform(np.array(predictions).reshape(-1, 1))
# Ensure the RNN input data is correctly shaped before making predictions
X_vn_rnn_reshaped = X_vn_rnn.reshape((X_vn_rnn.shape[0], X_vn_rnn.shape[1], 1))
X_sab_rnn_reshaped = X_sab_rnn.reshape((X_sab_rnn.shape[0], X_sab_rnn.shape[1], 1))
predictions_vn = {}
predictions_sab = {}
for name, model in models.items():
predictions_vn[name] = predict_rnn_future(model, X_vn_rnn_reshaped, vn_index_scaler)
predictions_sab[name] = predict_rnn_future(model, X_sab_rnn_reshaped, sab_stock_scaler)
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 298ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 28ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 26ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 41ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 26ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 22ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 30ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 23ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 24ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 174ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 34ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 30ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 23ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 34ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 24ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 44ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 149ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 61ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 27ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 27ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 54ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 25ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 22ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 60ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 11ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 31ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 44ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 26ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 318ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 33ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 61ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 40ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 37ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 22ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 28ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 21ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 45ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 24ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 41ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 22ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 92ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 21ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 36ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 62ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 45ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 34ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 21ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 24ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 68ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 23ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 22ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 52ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 22ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 26ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 31ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 31ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 54ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 86ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 52ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 44ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 23ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 62ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 385ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 54ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 22ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 49ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 27ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 22ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 26ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 28ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 42ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 29ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 32ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 32ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 26ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 42ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 22ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 39ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 35ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 21ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 33ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 24ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 26ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 14ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 20ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 22ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 36ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 24ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 33ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 18ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 17ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 16ms/step 1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 19ms/step
In [ ]:
# Combine SAB stock data
sab_stock = pd.concat([sab_stock_2018_2022, sab_stock_2023])
sab_stock.sort_values('Date', inplace=True)
sab_stock.reset_index(drop=True, inplace=True)
# Function to preprocess data
def preprocess_data(data, n_steps=60):
data = data[['Close']].values
scaler = MinMaxScaler(feature_range=(0, 1))
scaled_data = scaler.fit_transform(data)
X, y = [], []
for i in range(n_steps, len(scaled_data)):
X.append(scaled_data[i-n_steps:i, 0])
y.append(scaled_data[i, 0])
X, y = np.array(X), np.array(y)
X = np.reshape(X, (X.shape[0], X.shape[1], 1))
return X, y, scaler
# Preprocess the data
n_steps = 60
X_vn, y_vn, scaler_vn = preprocess_data(vn_index, n_steps)
X_sab, y_sab, scaler_sab = preprocess_data(sab_stock, n_steps)
# Train-test split
split_vn = int(len(X_vn) * 0.8)
split_sab = int(len(X_sab) * 0.8)
X_train_vn, X_test_vn = X_vn[:split_vn], X_vn[split_vn:]
y_train_vn, y_test_vn = y_vn[:split_vn], y_vn[split_vn:]
X_train_sab, X_test_sab = X_sab[:split_sab], X_sab[split_sab:]
y_train_sab, y_test_sab = y_sab[:split_sab], y_sab[split_sab:]
In [ ]:
from tensorflow.keras.layers import GRU
def create_gru_model(input_shape):
model = Sequential()
model.add(GRU(units=50, return_sequences=True, input_shape=input_shape))
model.add(Dropout(0.2))
model.add(GRU(units=50, return_sequences=True))
model.add(Dropout(0.2))
model.add(GRU(units=50))
model.add(Dropout(0.2))
model.add(Dense(units=1))
model.compile(optimizer='adam', loss='mean_squared_error')
return model
# Create and train the GRU model
gru_model_vn = create_gru_model((X_train_vn.shape[1], 1))
gru_model_vn.fit(X_train_vn, y_train_vn, epochs=100, batch_size=32)
gru_model_sab = create_gru_model((X_train_sab.shape[1], 1))
gru_model_sab.fit(X_train_sab, y_train_sab, epochs=100, batch_size=32)
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM, Dense, Dropout
def create_lstm_model(input_shape):
model = Sequential()
model.add(LSTM(units=50, return_sequences=True, input_shape=input_shape))
model.add(Dropout(0.2))
model.add(LSTM(units=50, return_sequences=True))
model.add(Dropout(0.2))
model.add(LSTM(units=50))
model.add(Dropout(0.2))
model.add(Dense(units=1))
model.compile(optimizer='adam', loss='mean_squared_error')
return model
# Create and train the LSTM model
lstm_model_vn = create_lstm_model((X_train_vn.shape[1], 1))
lstm_model_vn.fit(X_train_vn, y_train_vn, epochs=100, batch_size=32)
lstm_model_sab = create_lstm_model((X_train_sab.shape[1], 1))
lstm_model_sab.fit(X_train_sab, y_train_sab, epochs=100, batch_size=32)
from tensorflow.keras.layers import Bidirectional
def create_bidirectional_lstm_model(input_shape):
model = Sequential()
model.add(Bidirectional(LSTM(units=50, return_sequences=True), input_shape=input_shape))
model.add(Dropout(0.2))
model.add(Bidirectional(LSTM(units=50, return_sequences=True)))
model.add(Dropout(0.2))
model.add(Bidirectional(LSTM(units=50)))
model.add(Dropout(0.2))
model.add(Dense(units=1))
model.compile(optimizer='adam', loss='mean_squared_error')
return model
# Create and train the Bidirectional LSTM model
bidirectional_lstm_model_vn = create_bidirectional_lstm_model((X_train_vn.shape[1], 1))
bidirectional_lstm_model_vn.fit(X_train_vn, y_train_vn, epochs=100, batch_size=32)
bidirectional_lstm_model_sab = create_bidirectional_lstm_model((X_train_sab.shape[1], 1))
bidirectional_lstm_model_sab.fit(X_train_sab, y_train_sab, epochs=100, batch_size=32)
def create_bidirectional_gru_model(input_shape):
model = Sequential()
model.add(Bidirectional(GRU(units=50, return_sequences=True), input_shape=input_shape))
model.add(Dropout(0.2))
model.add(Bidirectional(GRU(units=50, return_sequences=True)))
model.add(Dropout(0.2))
model.add(Bidirectional(GRU(units=50)))
model.add(Dropout(0.2))
model.add(Dense(units=1))
model.compile(optimizer='adam', loss='mean_squared_error')
return model
# Create and train the Bidirectional GRU model
bidirectional_gru_model_vn = create_bidirectional_gru_model((X_train_vn.shape[1], 1))
bidirectional_gru_model_vn.fit(X_train_vn, y_train_vn, epochs=100, batch_size=32)
bidirectional_gru_model_sab = create_bidirectional_gru_model((X_train_sab.shape[1], 1))
bidirectional_gru_model_sab.fit(X_train_sab, y_train_sab, epochs=100, batch_size=32)
Epoch 1/100
/Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/rnn.py:204: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs)
30/30 ━━━━━━━━━━━━━━━━━━━━ 16s 39ms/step - loss: 0.0677 Epoch 2/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 43ms/step - loss: 0.0085 Epoch 3/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0045 Epoch 4/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0047 Epoch 5/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0043 Epoch 6/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0034 Epoch 7/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 54ms/step - loss: 0.0035 Epoch 8/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 46ms/step - loss: 0.0034 Epoch 9/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 46ms/step - loss: 0.0031 Epoch 10/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 47ms/step - loss: 0.0025 Epoch 11/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 49ms/step - loss: 0.0028 Epoch 12/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 47ms/step - loss: 0.0028 Epoch 13/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0031 Epoch 14/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 43ms/step - loss: 0.0026 Epoch 15/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0026 Epoch 16/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 43ms/step - loss: 0.0025 Epoch 17/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 42ms/step - loss: 0.0026 Epoch 18/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0024 Epoch 19/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 45ms/step - loss: 0.0028 Epoch 20/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 54ms/step - loss: 0.0024 Epoch 21/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0023 Epoch 22/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 54ms/step - loss: 0.0024 Epoch 23/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 46ms/step - loss: 0.0021 Epoch 24/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 43ms/step - loss: 0.0023 Epoch 25/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 64ms/step - loss: 0.0022 Epoch 26/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 43ms/step - loss: 0.0018 Epoch 27/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0020 Epoch 28/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 60ms/step - loss: 0.0018 Epoch 29/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 83ms/step - loss: 0.0017 Epoch 30/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0024 Epoch 31/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 43ms/step - loss: 0.0020 Epoch 32/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0016 Epoch 33/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0016 Epoch 34/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 42ms/step - loss: 0.0023 Epoch 35/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 43ms/step - loss: 0.0016 Epoch 36/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0018 Epoch 37/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0019 Epoch 38/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0016 Epoch 39/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0016 Epoch 40/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0017 Epoch 41/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0017 Epoch 42/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0017 Epoch 43/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0018 Epoch 44/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0017 Epoch 45/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0015 Epoch 46/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0014 Epoch 47/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0015 Epoch 48/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0014 Epoch 49/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0017 Epoch 50/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0015 Epoch 51/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0012 Epoch 52/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0016 Epoch 53/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0017 Epoch 54/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0013 Epoch 55/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0013 Epoch 56/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0016 Epoch 57/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0013 Epoch 58/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0013 Epoch 59/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0014 Epoch 60/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0014 Epoch 61/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0011 Epoch 62/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0012 Epoch 63/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0012 Epoch 64/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0011 Epoch 65/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0013 Epoch 66/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0015 Epoch 67/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0014 Epoch 68/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0013 Epoch 69/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0013 Epoch 70/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0012 Epoch 71/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0013 Epoch 72/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0012 Epoch 73/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0014 Epoch 74/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0011 Epoch 75/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0014 Epoch 76/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0010 Epoch 77/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0011 Epoch 78/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0012 Epoch 79/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0013 Epoch 80/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0011 Epoch 81/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0012 Epoch 82/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0011 Epoch 83/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0013 Epoch 84/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0010 Epoch 85/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 8.5150e-04 Epoch 86/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 9.3717e-04 Epoch 87/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0012 Epoch 88/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0010 Epoch 89/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0012 Epoch 90/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 9.6617e-04 Epoch 91/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 9.8099e-04 Epoch 92/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 42ms/step - loss: 9.1383e-04 Epoch 93/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 8.6782e-04 Epoch 94/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 8.5837e-04 Epoch 95/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 62ms/step - loss: 0.0010 Epoch 96/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 8.1999e-04 Epoch 97/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 7.6598e-04 Epoch 98/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 9.0533e-04 Epoch 99/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0010 Epoch 100/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 8.4333e-04 Epoch 1/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 6s 51ms/step - loss: 0.0751 Epoch 2/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 56ms/step - loss: 0.0064 Epoch 3/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0048 Epoch 4/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0048 Epoch 5/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0050 Epoch 6/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0041 Epoch 7/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0040 Epoch 8/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0036 Epoch 9/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0038 Epoch 10/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0034 Epoch 11/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0034 Epoch 12/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0038 Epoch 13/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0031 Epoch 14/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 55ms/step - loss: 0.0030 Epoch 15/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0029 Epoch 16/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0029 Epoch 17/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0030 Epoch 18/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 43ms/step - loss: 0.0029 Epoch 19/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0025 Epoch 20/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0027 Epoch 21/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0024 Epoch 22/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0024 Epoch 23/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0025 Epoch 24/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0025 Epoch 25/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0021 Epoch 26/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 56ms/step - loss: 0.0025 Epoch 27/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0026 Epoch 28/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0023 Epoch 29/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0021 Epoch 30/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0024 Epoch 31/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0022 Epoch 32/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0021 Epoch 33/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0022 Epoch 34/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0021 Epoch 35/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 43ms/step - loss: 0.0023 Epoch 36/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0021 Epoch 37/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0026 Epoch 38/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 54ms/step - loss: 0.0019 Epoch 39/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0020 Epoch 40/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 47ms/step - loss: 0.0019 Epoch 41/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0021 Epoch 42/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0018 Epoch 43/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0018 Epoch 44/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 42ms/step - loss: 0.0018 Epoch 45/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 44ms/step - loss: 0.0019 Epoch 46/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0019 Epoch 47/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0018 Epoch 48/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 45ms/step - loss: 0.0017 Epoch 49/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 60ms/step - loss: 0.0017 Epoch 50/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 42ms/step - loss: 0.0018 Epoch 51/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 41ms/step - loss: 0.0016 Epoch 52/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0016 Epoch 53/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0015 Epoch 54/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0018 Epoch 55/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0017 Epoch 56/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0015 Epoch 57/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0017 Epoch 58/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 43ms/step - loss: 0.0016 Epoch 59/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0014 Epoch 60/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0014 Epoch 61/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 54ms/step - loss: 0.0017 Epoch 62/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0016 Epoch 63/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0014 Epoch 64/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0017 Epoch 65/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 39ms/step - loss: 0.0015 Epoch 66/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0012 Epoch 67/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0016 Epoch 68/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0016 Epoch 69/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0014 Epoch 70/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0014 Epoch 71/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0011 Epoch 72/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 55ms/step - loss: 0.0016 Epoch 73/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0014 Epoch 74/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 53ms/step - loss: 0.0014 Epoch 75/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0013 Epoch 76/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0014 Epoch 77/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0013 Epoch 78/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0012 Epoch 79/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0013 Epoch 80/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0011 Epoch 81/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0015 Epoch 82/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0011 Epoch 83/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0015 Epoch 84/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 56ms/step - loss: 0.0012 Epoch 85/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0013 Epoch 86/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0011 Epoch 87/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0011 Epoch 88/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0012 Epoch 89/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0013 Epoch 90/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0011 Epoch 91/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0012 Epoch 92/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0013 Epoch 93/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0012 Epoch 94/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 41ms/step - loss: 0.0010 Epoch 95/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0012 Epoch 96/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 57ms/step - loss: 0.0011 Epoch 97/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0010 Epoch 98/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0012 Epoch 99/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 43ms/step - loss: 0.0010 Epoch 100/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 9.5716e-04 Epoch 1/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 4s 33ms/step - loss: 0.0661 Epoch 2/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0071 Epoch 3/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 31ms/step - loss: 0.0048 Epoch 4/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0050 Epoch 5/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0041 Epoch 6/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0043 Epoch 7/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0041 Epoch 8/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0032 Epoch 9/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0038 Epoch 10/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0039 Epoch 11/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0039 Epoch 12/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0037 Epoch 13/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0034 Epoch 14/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0038 Epoch 15/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0038 Epoch 16/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0033 Epoch 17/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0038 Epoch 18/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0031 Epoch 19/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0032 Epoch 20/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0030 Epoch 21/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0032 Epoch 22/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0025 Epoch 23/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0028 Epoch 24/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 53ms/step - loss: 0.0028 Epoch 25/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0031 Epoch 26/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0028 Epoch 27/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0026 Epoch 28/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0028 Epoch 29/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0027 Epoch 30/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0025 Epoch 31/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0025 Epoch 32/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0023 Epoch 33/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0026 Epoch 34/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0023 Epoch 35/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0025 Epoch 36/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0026 Epoch 37/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0023 Epoch 38/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0021 Epoch 39/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0021 Epoch 40/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0020 Epoch 41/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 51ms/step - loss: 0.0026 Epoch 42/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0024 Epoch 43/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0020 Epoch 44/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0025 Epoch 45/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0017 Epoch 46/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0018 Epoch 47/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0021 Epoch 48/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0020 Epoch 49/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0018 Epoch 50/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0019 Epoch 51/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0019 Epoch 52/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0015 Epoch 53/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0018 Epoch 54/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0019 Epoch 55/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0018 Epoch 56/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0020 Epoch 57/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 53ms/step - loss: 0.0017 Epoch 58/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0017 Epoch 59/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0017 Epoch 60/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0016 Epoch 61/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0018 Epoch 62/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0017 Epoch 63/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0019 Epoch 64/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0016 Epoch 65/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0015 Epoch 66/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0014 Epoch 67/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0015 Epoch 68/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0015 Epoch 69/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0019 Epoch 70/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0017 Epoch 71/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0015 Epoch 72/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0013 Epoch 73/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0014 Epoch 74/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 56ms/step - loss: 0.0014 Epoch 75/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0015 Epoch 76/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0014 Epoch 77/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0014 Epoch 78/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0015 Epoch 79/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0012 Epoch 80/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0014 Epoch 81/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0012 Epoch 82/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0013 Epoch 83/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0013 Epoch 84/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0013 Epoch 85/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0011 Epoch 86/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0012 Epoch 87/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0012 Epoch 88/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0012 Epoch 89/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0013 Epoch 90/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0012 Epoch 91/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 59ms/step - loss: 0.0012 Epoch 92/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0011 Epoch 93/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0014 Epoch 94/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0014 Epoch 95/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0013 Epoch 96/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0011 Epoch 97/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0013 Epoch 98/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0012 Epoch 99/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0011 Epoch 100/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 9.5812e-04 Epoch 1/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 4s 33ms/step - loss: 0.1030 Epoch 2/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 32ms/step - loss: 0.0070 Epoch 3/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0069 Epoch 4/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 54ms/step - loss: 0.0068 Epoch 5/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0056 Epoch 6/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0057 Epoch 7/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0055 Epoch 8/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0051 Epoch 9/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0050 Epoch 10/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0048 Epoch 11/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0052 Epoch 12/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0049 Epoch 13/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0049 Epoch 14/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0046 Epoch 15/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0045 Epoch 16/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0043 Epoch 17/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 52ms/step - loss: 0.0041 Epoch 18/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0041 Epoch 19/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0038 Epoch 20/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0035 Epoch 21/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0042 Epoch 22/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0033 Epoch 23/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0036 Epoch 24/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0034 Epoch 25/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0034 Epoch 26/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0031 Epoch 27/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0031 Epoch 28/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0030 Epoch 29/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 37ms/step - loss: 0.0031 Epoch 30/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0027 Epoch 31/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0028 Epoch 32/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 39ms/step - loss: 0.0030 Epoch 33/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0026 Epoch 34/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0026 Epoch 35/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0028 Epoch 36/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0030 Epoch 37/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0027 Epoch 38/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0027 Epoch 39/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0028 Epoch 40/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0025 Epoch 41/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0022 Epoch 42/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0023 Epoch 43/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0022 Epoch 44/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 55ms/step - loss: 0.0022 Epoch 45/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0027 Epoch 46/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0027 Epoch 47/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0024 Epoch 48/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0023 Epoch 49/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0024 Epoch 50/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0024 Epoch 51/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0023 Epoch 52/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0022 Epoch 53/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0022 Epoch 54/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0018 Epoch 55/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0023 Epoch 56/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0023 Epoch 57/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 53ms/step - loss: 0.0021 Epoch 58/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0023 Epoch 59/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 38ms/step - loss: 0.0022 Epoch 60/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0018 Epoch 61/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0019 Epoch 62/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0019 Epoch 63/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0020 Epoch 64/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0017 Epoch 65/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0017 Epoch 66/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0017 Epoch 67/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0020 Epoch 68/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0020 Epoch 69/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0016 Epoch 70/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 55ms/step - loss: 0.0018 Epoch 71/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0018 Epoch 72/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0017 Epoch 73/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0016 Epoch 74/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0017 Epoch 75/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0015 Epoch 76/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0016 Epoch 77/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0016 Epoch 78/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0016 Epoch 79/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0016 Epoch 80/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0015 Epoch 81/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0015 Epoch 82/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0015 Epoch 83/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0014 Epoch 84/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 54ms/step - loss: 0.0015 Epoch 85/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0016 Epoch 86/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0015 Epoch 87/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0015 Epoch 88/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0016 Epoch 89/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0014 Epoch 90/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0016 Epoch 91/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0016 Epoch 92/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0013 Epoch 93/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0012 Epoch 94/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 33ms/step - loss: 0.0015 Epoch 95/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 36ms/step - loss: 0.0013 Epoch 96/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0013 Epoch 97/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 50ms/step - loss: 0.0013 Epoch 98/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step - loss: 0.0016 Epoch 99/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 35ms/step - loss: 0.0012 Epoch 100/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 1s 34ms/step - loss: 0.0013 Epoch 1/100
/Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/bidirectional.py:107: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs)
30/30 ━━━━━━━━━━━━━━━━━━━━ 6s 46ms/step - loss: 0.0477 Epoch 2/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 42ms/step - loss: 0.0048 Epoch 3/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 42ms/step - loss: 0.0042 Epoch 4/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 0.0035 Epoch 5/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 3s 90ms/step - loss: 0.0035 Epoch 6/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 72ms/step - loss: 0.0029 Epoch 7/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 0.0028 Epoch 8/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 0.0031 Epoch 9/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 0.0029 Epoch 10/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 74ms/step - loss: 0.0024 Epoch 11/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 72ms/step - loss: 0.0023 Epoch 12/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 0.0021 Epoch 13/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 3s 91ms/step - loss: 0.0019 Epoch 14/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 0.0020 Epoch 15/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 0.0023 Epoch 16/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 0.0027 Epoch 17/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 0.0020 Epoch 18/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 0.0022 Epoch 19/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 0.0017 Epoch 20/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 73ms/step - loss: 0.0021 Epoch 21/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 3s 94ms/step - loss: 0.0018 Epoch 22/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 73ms/step - loss: 0.0017 Epoch 23/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 0.0018 Epoch 24/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 0.0018 Epoch 25/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 0.0018 Epoch 26/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 74ms/step - loss: 0.0016 Epoch 27/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 0.0016 Epoch 28/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 73ms/step - loss: 0.0014 Epoch 29/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 3s 93ms/step - loss: 0.0016 Epoch 30/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 73ms/step - loss: 0.0012 Epoch 31/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 0.0016 Epoch 32/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 3s 96ms/step - loss: 0.0023 Epoch 33/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 80ms/step - loss: 0.0019 Epoch 34/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 83ms/step - loss: 0.0013 Epoch 35/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 80ms/step - loss: 0.0014 Epoch 36/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 3s 87ms/step - loss: 0.0015 Epoch 37/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 0.0016 Epoch 38/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 75ms/step - loss: 0.0013 Epoch 39/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 74ms/step - loss: 0.0014 Epoch 40/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 0.0013 Epoch 41/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 3s 100ms/step - loss: 0.0014 Epoch 42/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 3s 86ms/step - loss: 0.0016 Epoch 43/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 80ms/step - loss: 0.0014 Epoch 44/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 0.0013 Epoch 45/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0014 Epoch 46/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 74ms/step - loss: 0.0014 Epoch 47/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 3s 93ms/step - loss: 0.0013 Epoch 48/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 80ms/step - loss: 0.0013 Epoch 49/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 81ms/step - loss: 0.0013 Epoch 50/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 0.0014 Epoch 51/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 0.0013 Epoch 52/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 0.0012 Epoch 53/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 0.0011 Epoch 54/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 75ms/step - loss: 0.0014 Epoch 55/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 75ms/step - loss: 0.0016 Epoch 56/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 74ms/step - loss: 0.0011 Epoch 57/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 80ms/step - loss: 0.0013 Epoch 58/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 3s 100ms/step - loss: 0.0014 Epoch 59/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 81ms/step - loss: 0.0012 Epoch 60/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0011 Epoch 61/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0010 Epoch 62/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0012 Epoch 63/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 72ms/step - loss: 0.0010 Epoch 64/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 9.7358e-04 Epoch 65/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 0.0013 Epoch 66/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 72ms/step - loss: 0.0015 Epoch 67/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 0.0011 Epoch 68/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 0.0012 Epoch 69/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 0.0014 Epoch 70/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0011 Epoch 71/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 0.0011 Epoch 72/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 0.0010 Epoch 73/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 0.0011 Epoch 74/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 9.6154e-04 Epoch 75/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 69ms/step - loss: 9.3089e-04 Epoch 76/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 0.0012 Epoch 77/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 0.0011 Epoch 78/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 69ms/step - loss: 8.7716e-04 Epoch 79/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 9.0598e-04 Epoch 80/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 9.9772e-04 Epoch 81/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 72ms/step - loss: 0.0012 Epoch 82/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 8.8833e-04 Epoch 83/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 0.0011 Epoch 84/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 0.0012 Epoch 85/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 0.0011 Epoch 86/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 73ms/step - loss: 8.9063e-04 Epoch 87/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 0.0012 Epoch 88/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 72ms/step - loss: 9.4383e-04 Epoch 89/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 8.0798e-04 Epoch 90/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 72ms/step - loss: 9.5210e-04 Epoch 91/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 0.0010 Epoch 92/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 72ms/step - loss: 9.3754e-04 Epoch 93/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 75ms/step - loss: 9.5417e-04 Epoch 94/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 9.9397e-04 Epoch 95/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 0.0012 Epoch 96/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 9.7903e-04 Epoch 97/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 71ms/step - loss: 9.4020e-04 Epoch 98/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 9.4218e-04 Epoch 99/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 70ms/step - loss: 9.9187e-04 Epoch 100/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 69ms/step - loss: 8.6089e-04 Epoch 1/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 6s 45ms/step - loss: 0.0820 Epoch 2/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 43ms/step - loss: 0.0078 Epoch 3/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 57ms/step - loss: 0.0060 Epoch 4/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0042 Epoch 5/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0040 Epoch 6/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 74ms/step - loss: 0.0045 Epoch 7/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 73ms/step - loss: 0.0041 Epoch 8/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 72ms/step - loss: 0.0031 Epoch 9/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 72ms/step - loss: 0.0040 Epoch 10/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0033 Epoch 11/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 73ms/step - loss: 0.0031 Epoch 12/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0029 Epoch 13/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0027 Epoch 14/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0029 Epoch 15/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 72ms/step - loss: 0.0025 Epoch 16/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0023 Epoch 17/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 73ms/step - loss: 0.0024 Epoch 18/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0026 Epoch 19/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 74ms/step - loss: 0.0024 Epoch 20/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 73ms/step - loss: 0.0020 Epoch 21/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0020 Epoch 22/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 72ms/step - loss: 0.0021 Epoch 23/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 73ms/step - loss: 0.0019 Epoch 24/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0020 Epoch 25/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0020 Epoch 26/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 72ms/step - loss: 0.0019 Epoch 27/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 70ms/step - loss: 0.0022 Epoch 28/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0020 Epoch 29/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 72ms/step - loss: 0.0019 Epoch 30/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0020 Epoch 31/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0021 Epoch 32/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 74ms/step - loss: 0.0021 Epoch 33/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0017 Epoch 34/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0017 Epoch 35/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 74ms/step - loss: 0.0017 Epoch 36/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0018 Epoch 37/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0017 Epoch 38/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 72ms/step - loss: 0.0017 Epoch 39/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0016 Epoch 40/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 73ms/step - loss: 0.0017 Epoch 41/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 74ms/step - loss: 0.0018 Epoch 42/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 73ms/step - loss: 0.0017 Epoch 43/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 73ms/step - loss: 0.0016 Epoch 44/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 81ms/step - loss: 0.0015 Epoch 45/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 73ms/step - loss: 0.0017 Epoch 46/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 72ms/step - loss: 0.0017 Epoch 47/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 4s 107ms/step - loss: 0.0018 Epoch 48/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 4s 112ms/step - loss: 0.0017 Epoch 49/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 92ms/step - loss: 0.0015 Epoch 50/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 4s 115ms/step - loss: 0.0016 Epoch 51/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 81ms/step - loss: 0.0019 Epoch 52/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 80ms/step - loss: 0.0018 Epoch 53/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0013 Epoch 54/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 76ms/step - loss: 0.0016 Epoch 55/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 82ms/step - loss: 0.0017 Epoch 56/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0016 Epoch 57/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 74ms/step - loss: 0.0016 Epoch 58/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0015 Epoch 59/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 74ms/step - loss: 0.0016 Epoch 60/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 74ms/step - loss: 0.0014 Epoch 61/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 72ms/step - loss: 0.0015 Epoch 62/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 75ms/step - loss: 0.0016 Epoch 63/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 73ms/step - loss: 0.0015 Epoch 64/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 4s 95ms/step - loss: 0.0015 Epoch 65/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0015 Epoch 66/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 75ms/step - loss: 0.0015 Epoch 67/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 73ms/step - loss: 0.0015 Epoch 68/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 73ms/step - loss: 0.0019 Epoch 69/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 74ms/step - loss: 0.0015 Epoch 70/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 74ms/step - loss: 0.0012 Epoch 71/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0014 Epoch 72/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0013 Epoch 73/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0013 Epoch 74/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 70ms/step - loss: 0.0012 Epoch 75/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 72ms/step - loss: 0.0012 Epoch 76/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 75ms/step - loss: 0.0012 Epoch 77/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 72ms/step - loss: 0.0014 Epoch 78/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 72ms/step - loss: 0.0016 Epoch 79/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 70ms/step - loss: 0.0015 Epoch 80/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0015 Epoch 81/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0012 Epoch 82/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 72ms/step - loss: 0.0014 Epoch 83/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 76ms/step - loss: 0.0015 Epoch 84/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0012 Epoch 85/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 74ms/step - loss: 0.0011 Epoch 86/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 72ms/step - loss: 0.0013 Epoch 87/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 74ms/step - loss: 0.0012 Epoch 88/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 72ms/step - loss: 0.0013 Epoch 89/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0013 Epoch 90/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 70ms/step - loss: 0.0013 Epoch 91/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 70ms/step - loss: 0.0012 Epoch 92/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0015 Epoch 93/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0013 Epoch 94/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 72ms/step - loss: 0.0013 Epoch 95/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 72ms/step - loss: 0.0012 Epoch 96/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0014 Epoch 97/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0012 Epoch 98/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0012 Epoch 99/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0011 Epoch 100/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 71ms/step - loss: 0.0011 Epoch 1/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 7s 57ms/step - loss: 0.0680 Epoch 2/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 47ms/step - loss: 0.0052 Epoch 3/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 1s 47ms/step - loss: 0.0035 Epoch 4/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 82ms/step - loss: 0.0028 Epoch 5/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0028 Epoch 6/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0024 Epoch 7/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0023 Epoch 8/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0022 Epoch 9/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0021 Epoch 10/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0020 Epoch 11/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0019 Epoch 12/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 81ms/step - loss: 0.0021 Epoch 13/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0021 Epoch 14/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0020 Epoch 15/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0021 Epoch 16/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0020 Epoch 17/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0018 Epoch 18/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0021 Epoch 19/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0018 Epoch 20/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0015 Epoch 21/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 0.0018 Epoch 22/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0016 Epoch 23/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0014 Epoch 24/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 81ms/step - loss: 0.0015 Epoch 25/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0014 Epoch 26/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0016 Epoch 27/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0020 Epoch 28/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 0.0014 Epoch 29/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0016 Epoch 30/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 0.0014 Epoch 31/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0015 Epoch 32/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0015 Epoch 33/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0013 Epoch 34/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0013 Epoch 35/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0014 Epoch 36/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0013 Epoch 37/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0014 Epoch 38/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0017 Epoch 39/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0015 Epoch 40/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0015 Epoch 41/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0016 Epoch 42/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0013 Epoch 43/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0013 Epoch 44/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0012 Epoch 45/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0012 Epoch 46/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0013 Epoch 47/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 80ms/step - loss: 0.0014 Epoch 48/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0016 Epoch 49/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0013 Epoch 50/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0012 Epoch 51/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0011 Epoch 52/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0016 Epoch 53/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0013 Epoch 54/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0015 Epoch 55/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0012 Epoch 56/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0013 Epoch 57/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 80ms/step - loss: 0.0012 Epoch 58/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0012 Epoch 59/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0014 Epoch 60/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 9.6982e-04 Epoch 61/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 81ms/step - loss: 0.0011 Epoch 62/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0010 Epoch 63/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 81ms/step - loss: 0.0011 Epoch 64/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 9.9601e-04 Epoch 65/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0012 Epoch 66/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0013 Epoch 67/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0012 Epoch 68/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0012 Epoch 69/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 81ms/step - loss: 0.0012 Epoch 70/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 80ms/step - loss: 9.9004e-04 Epoch 71/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0011 Epoch 72/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 3s 84ms/step - loss: 0.0011 Epoch 73/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 9.5263e-04 Epoch 74/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0013 Epoch 75/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 0.0011 Epoch 76/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 9.3975e-04 Epoch 77/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0011 Epoch 78/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0013 Epoch 79/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 9.8219e-04 Epoch 80/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0011 Epoch 81/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 3s 98ms/step - loss: 9.7195e-04 Epoch 82/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 3s 85ms/step - loss: 0.0011 Epoch 83/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 3s 100ms/step - loss: 9.7576e-04 Epoch 84/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 3s 84ms/step - loss: 0.0011 Epoch 85/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 3s 85ms/step - loss: 0.0012 Epoch 86/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 0.0011 Epoch 87/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 9.3958e-04 Epoch 88/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 0.0010 Epoch 89/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 0.0011 Epoch 90/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0010 Epoch 91/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 9.2884e-04 Epoch 92/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 0.0011 Epoch 93/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 79ms/step - loss: 0.0011 Epoch 94/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 9.6371e-04 Epoch 95/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 78ms/step - loss: 9.1505e-04 Epoch 96/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0010 Epoch 97/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0011 Epoch 98/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 9.3425e-04 Epoch 99/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 77ms/step - loss: 0.0010 Epoch 100/100 30/30 ━━━━━━━━━━━━━━━━━━━━ 2s 76ms/step - loss: 0.0011 Epoch 1/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 7s 47ms/step - loss: 0.0950 Epoch 2/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 47ms/step - loss: 0.0097 Epoch 3/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 65ms/step - loss: 0.0040 Epoch 4/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0032 Epoch 5/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 83ms/step - loss: 0.0033 Epoch 6/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0030 Epoch 7/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 80ms/step - loss: 0.0030 Epoch 8/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 80ms/step - loss: 0.0028 Epoch 9/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0025 Epoch 10/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 76ms/step - loss: 0.0027 Epoch 11/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0027 Epoch 12/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 81ms/step - loss: 0.0025 Epoch 13/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 84ms/step - loss: 0.0026 Epoch 14/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 84ms/step - loss: 0.0027 Epoch 15/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0022 Epoch 16/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0021 Epoch 17/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 80ms/step - loss: 0.0023 Epoch 18/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0023 Epoch 19/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0020 Epoch 20/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0024 Epoch 21/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 81ms/step - loss: 0.0023 Epoch 22/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 82ms/step - loss: 0.0022 Epoch 23/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0018 Epoch 24/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0017 Epoch 25/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 83ms/step - loss: 0.0017 Epoch 26/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 88ms/step - loss: 0.0017 Epoch 27/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0018 Epoch 28/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0020 Epoch 29/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 76ms/step - loss: 0.0022 Epoch 30/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 80ms/step - loss: 0.0023 Epoch 31/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 4s 103ms/step - loss: 0.0018 Epoch 32/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 87ms/step - loss: 0.0021 Epoch 33/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 87ms/step - loss: 0.0017 Epoch 34/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0018 Epoch 35/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 80ms/step - loss: 0.0019 Epoch 36/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0020 Epoch 37/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 81ms/step - loss: 0.0016 Epoch 38/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 84ms/step - loss: 0.0015 Epoch 39/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0015 Epoch 40/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0018 Epoch 41/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 84ms/step - loss: 0.0015 Epoch 42/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0017 Epoch 43/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0018 Epoch 44/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0015 Epoch 45/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 85ms/step - loss: 0.0016 Epoch 46/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0014 Epoch 47/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 85ms/step - loss: 0.0019 Epoch 48/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0014 Epoch 49/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0015 Epoch 50/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0019 Epoch 51/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0015 Epoch 52/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 80ms/step - loss: 0.0014 Epoch 53/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 84ms/step - loss: 0.0016 Epoch 54/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 80ms/step - loss: 0.0015 Epoch 55/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0016 Epoch 56/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 82ms/step - loss: 0.0015 Epoch 57/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0015 Epoch 58/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0014 Epoch 59/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 81ms/step - loss: 0.0014 Epoch 60/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 76ms/step - loss: 0.0014 Epoch 61/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 80ms/step - loss: 0.0014 Epoch 62/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 85ms/step - loss: 0.0013 Epoch 63/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 80ms/step - loss: 0.0014 Epoch 64/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0015 Epoch 65/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0011 Epoch 66/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 4s 100ms/step - loss: 0.0014 Epoch 67/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0017 Epoch 68/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 80ms/step - loss: 0.0013 Epoch 69/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 80ms/step - loss: 0.0014 Epoch 70/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 81ms/step - loss: 0.0015 Epoch 71/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0014 Epoch 72/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0013 Epoch 73/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0015 Epoch 74/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0013 Epoch 75/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0015 Epoch 76/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 92ms/step - loss: 0.0013 Epoch 77/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 81ms/step - loss: 0.0013 Epoch 78/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0011 Epoch 79/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0011 Epoch 80/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0012 Epoch 81/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 76ms/step - loss: 0.0012 Epoch 82/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0012 Epoch 83/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0011 Epoch 84/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 80ms/step - loss: 0.0013 Epoch 85/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0012 Epoch 86/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 80ms/step - loss: 0.0013 Epoch 87/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 85ms/step - loss: 0.0013 Epoch 88/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0012 Epoch 89/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0011 Epoch 90/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0014 Epoch 91/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 76ms/step - loss: 0.0013 Epoch 92/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 78ms/step - loss: 0.0011 Epoch 93/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0013 Epoch 94/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0012 Epoch 95/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 80ms/step - loss: 0.0011 Epoch 96/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 80ms/step - loss: 0.0014 Epoch 97/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0012 Epoch 98/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 79ms/step - loss: 0.0011 Epoch 99/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0011 Epoch 100/100 36/36 ━━━━━━━━━━━━━━━━━━━━ 3s 77ms/step - loss: 0.0011
Out[ ]:
<keras.src.callbacks.history.History at 0x55a0bc740>
In [ ]:
def predict(model, X):
return model.predict(X)
def plot_comparison(actual_train, actual_valid, train_pred, valid_pred, title):
plt.figure(figsize=(12, 6))
plt.plot(actual_train, color='blue', label='Actual Train Stock Price')
plt.plot(range(len(actual_train), len(actual_train) + len(actual_valid)), actual_valid, color='blue')
plt.plot(train_pred, color='red', label='Train Predictions')
plt.plot(range(len(train_pred), len(train_pred) + len(valid_pred)), valid_pred, color='green', label='Valid Predictions')
plt.title(title)
plt.xlabel('Time')
plt.ylabel('Stock Price')
plt.legend()
plt.grid(True)
plt.show()
In [ ]:
# LSTM Predictions
train_predictions_vn_lstm = predict(lstm_model_vn, X_train_vn)
valid_predictions_vn_lstm = predict(lstm_model_vn, X_test_vn)
train_predictions_sab_lstm = predict(lstm_model_sab, X_train_sab)
valid_predictions_sab_lstm = predict(lstm_model_sab, X_test_sab)
# GRU Predictions
train_predictions_vn_gru = predict(gru_model_vn, X_train_vn)
valid_predictions_vn_gru = predict(gru_model_vn, X_test_vn)
train_predictions_sab_gru = predict(gru_model_sab, X_train_sab)
valid_predictions_sab_gru = predict(gru_model_sab, X_test_sab)
# Bidirectional LSTM Predictions
train_predictions_vn_bilstm = predict(bidirectional_lstm_model_vn, X_train_vn)
valid_predictions_vn_bilstm = predict(bidirectional_lstm_model_vn, X_test_vn)
train_predictions_sab_bilstm = predict(bidirectional_lstm_model_sab, X_train_sab)
valid_predictions_sab_bilstm = predict(bidirectional_lstm_model_sab, X_test_sab)
# Bidirectional GRU Predictions
train_predictions_vn_bigru = predict(bidirectional_gru_model_vn, X_train_vn)
valid_predictions_vn_bigru = predict(bidirectional_gru_model_vn, X_test_vn)
train_predictions_sab_bigru = predict(bidirectional_gru_model_sab, X_train_sab)
valid_predictions_sab_bigru = predict(bidirectional_gru_model_sab, X_test_sab)
# Inverse transform predictions
train_predictions_vn_lstm = scaler_vn.inverse_transform(train_predictions_vn_lstm)
valid_predictions_vn_lstm = scaler_vn.inverse_transform(valid_predictions_vn_lstm)
train_predictions_sab_lstm = scaler_sab.inverse_transform(train_predictions_sab_lstm)
valid_predictions_sab_lstm = scaler_sab.inverse_transform(valid_predictions_sab_lstm)
train_predictions_vn_gru = scaler_vn.inverse_transform(train_predictions_vn_gru)
valid_predictions_vn_gru = scaler_vn.inverse_transform(valid_predictions_vn_gru)
train_predictions_sab_gru = scaler_sab.inverse_transform(train_predictions_sab_gru)
valid_predictions_sab_gru = scaler_sab.inverse_transform(valid_predictions_sab_gru)
train_predictions_vn_bilstm = scaler_vn.inverse_transform(train_predictions_vn_bilstm)
valid_predictions_vn_bilstm = scaler_vn.inverse_transform(valid_predictions_vn_bilstm)
train_predictions_sab_bilstm = scaler_sab.inverse_transform(train_predictions_sab_bilstm)
valid_predictions_sab_bilstm = scaler_sab.inverse_transform(valid_predictions_sab_bilstm)
train_predictions_vn_bigru = scaler_vn.inverse_transform(train_predictions_vn_bigru)
valid_predictions_vn_bigru = scaler_vn.inverse_transform(valid_predictions_vn_bigru)
train_predictions_sab_bigru = scaler_sab.inverse_transform(train_predictions_sab_bigru)
valid_predictions_sab_bigru = scaler_sab.inverse_transform(valid_predictions_sab_bigru)
# Inverse transform actual values for plotting
y_train_vn = scaler_vn.inverse_transform(y_train_vn.reshape(-1, 1))
y_test_vn = scaler_vn.inverse_transform(y_test_vn.reshape(-1, 1))
y_train_sab = scaler_sab.inverse_transform(y_train_sab.reshape(-1, 1))
y_test_sab = scaler_sab.inverse_transform(y_test_sab.reshape(-1, 1))
# Plotting for each model
def plot_comparison(actual_train, actual_valid, train_pred, valid_pred, title):
plt.figure(figsize=(12, 6))
plt.plot(actual_train, color='blue', label='Actual Train Stock Price')
plt.plot(range(len(actual_train), len(actual_train) + len(actual_valid)), actual_valid, color='blue')
plt.plot(train_pred, color='red', label='Train Predictions')
plt.plot(range(len(train_pred), len(train_pred) + len(valid_pred)), valid_pred, color='green', label='Valid Predictions')
plt.title(title)
plt.xlabel('Time')
plt.ylabel('Stock Price')
plt.legend()
plt.grid(True)
plt.show()
# Plot for VN-Index
plot_comparison(y_train_vn, y_test_vn, train_predictions_vn_lstm, valid_predictions_vn_lstm, 'Predicting Stock Time Series 2024 (VN-Index) - LSTM')
plot_comparison(y_train_vn, y_test_vn, train_predictions_vn_gru, valid_predictions_vn_gru, 'Predicting Stock Time Series 2024 (VN-Index) - GRU')
plot_comparison(y_train_vn, y_test_vn, train_predictions_vn_bilstm, valid_predictions_vn_bilstm, 'Predicting Stock Time Series 2024 (VN-Index) - Bidirectional LSTM')
plot_comparison(y_train_vn, y_test_vn, train_predictions_vn_bigru, valid_predictions_vn_bigru, 'Predicting Stock Time Series 2024 (VN-Index) - Bidirectional GRU')
# Plot for SAB
plot_comparison(y_train_sab, y_test_sab, train_predictions_sab_lstm, valid_predictions_sab_lstm, 'Predicting Stock Time Series 2024 (SAB) - LSTM')
plot_comparison(y_train_sab, y_test_sab, train_predictions_sab_gru, valid_predictions_sab_gru, 'Predicting Stock Time Series 2024 (SAB) - GRU')
plot_comparison(y_train_sab, y_test_sab, train_predictions_sab_bilstm, valid_predictions_sab_bilstm, 'Predicting Stock Time Series 2024 (SAB) - Bidirectional LSTM')
plot_comparison(y_train_sab, y_test_sab, train_predictions_sab_bigru, valid_predictions_sab_bigru, 'Predicting Stock Time Series 2024 (SAB) - Bidirectional GRU')
30/30 ━━━━━━━━━━━━━━━━━━━━ 0s 8ms/step 8/8 ━━━━━━━━━━━━━━━━━━━━ 0s 8ms/step 36/36 ━━━━━━━━━━━━━━━━━━━━ 0s 8ms/step 9/9 ━━━━━━━━━━━━━━━━━━━━ 0s 8ms/step 30/30 ━━━━━━━━━━━━━━━━━━━━ 0s 8ms/step 8/8 ━━━━━━━━━━━━━━━━━━━━ 0s 8ms/step 36/36 ━━━━━━━━━━━━━━━━━━━━ 0s 8ms/step 9/9 ━━━━━━━━━━━━━━━━━━━━ 0s 8ms/step 30/30 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 8/8 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 36/36 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 9/9 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 30/30 ━━━━━━━━━━━━━━━━━━━━ 0s 13ms/step 8/8 ━━━━━━━━━━━━━━━━━━━━ 0s 12ms/step 36/36 ━━━━━━━━━━━━━━━━━━━━ 2s 31ms/step 9/9 ━━━━━━━━━━━━━━━━━━━━ 0s 15ms/step
In [ ]:
import pandas as pd
import numpy as np
from sklearn.preprocessing import MinMaxScaler
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM, GRU, Dropout, Dense, Bidirectional
# Data preparation
vn_index = pd.read_excel('VN-Index-2018-2023.xlsx')
sab_stock_2018_2022 = pd.read_excel('SAB stock price 2018-2022.xlsx')
sab_stock_2023 = pd.read_excel('SAB stock price 2023.xlsx')
sab_stock = pd.concat([sab_stock_2018_2022, sab_stock_2023])
sab_stock.sort_values('Date', inplace=True)
sab_stock.reset_index(drop=True, inplace=True)
# Preprocess your data (example for VN-Index)
scaler_vn = MinMaxScaler(feature_range=(0, 1))
scaled_data_vn = scaler_vn.fit_transform(vn_index['Close'].values.reshape(-1, 1))
# Create sequences
def create_sequences(data, seq_length):
X = []
y = []
for i in range(len(data) - seq_length):
X.append(data[i:i+seq_length])
y.append(data[i+seq_length])
return np.array(X), np.array(y)
seq_length = 60
X_vn, y_vn = create_sequences(scaled_data_vn, seq_length)
# Split data into training and testing
split = int(0.8 * len(X_vn))
X_train_vn, X_test_vn = X_vn[:split], X_vn[split:]
y_train_vn, y_test_vn = y_vn[:split], y_vn[split:]
# Define models
# LSTM Model
lstm_model = Sequential()
lstm_model.add(LSTM(64, return_sequences=True, input_shape=(X_train_vn.shape[1], 1)))
lstm_model.add(Dropout(0.2))
lstm_model.add(LSTM(64, return_sequences=True))
lstm_model.add(Dropout(0.2))
lstm_model.add(LSTM(64))
lstm_model.add(Dropout(0.2))
lstm_model.add(Dense(1))
lstm_model.summary()
# GRU Model
gru_model = Sequential()
gru_model.add(GRU(64, return_sequences=True, input_shape=(X_train_vn.shape[1], 1)))
gru_model.add(Dropout(0.2))
gru_model.add(GRU(64, return_sequences=True))
gru_model.add(Dropout(0.2))
gru_model.add(GRU(64))
gru_model.add(Dropout(0.2))
gru_model.add(Dense(1))
gru_model.summary()
# Bidirectional LSTM Model
bidirectional_lstm_model = Sequential()
bidirectional_lstm_model.add(Bidirectional(LSTM(64, return_sequences=True), input_shape=(X_train_vn.shape[1], 1)))
bidirectional_lstm_model.add(Dropout(0.2))
bidirectional_lstm_model.add(Bidirectional(LSTM(64, return_sequences=True)))
bidirectional_lstm_model.add(Dropout(0.2))
bidirectional_lstm_model.add(Bidirectional(LSTM(64)))
bidirectional_lstm_model.add(Dropout(0.2))
bidirectional_lstm_model.add(Dense(1))
bidirectional_lstm_model.summary()
# Bidirectional GRU Model
bidirectional_gru_model = Sequential()
bidirectional_gru_model.add(Bidirectional(GRU(64, return_sequences=True), input_shape=(X_train_vn.shape[1], 1)))
bidirectional_gru_model.add(Dropout(0.2))
bidirectional_gru_model.add(Bidirectional(GRU(64, return_sequences=True)))
bidirectional_gru_model.add(Dropout(0.2))
bidirectional_gru_model.add(Bidirectional(GRU(64)))
bidirectional_gru_model.add(Dropout(0.2))
bidirectional_gru_model.add(Dense(1))
bidirectional_gru_model.summary()
/Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/rnn.py:204: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs)
Model: "sequential_17"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓ ┃ Layer (type) ┃ Output Shape ┃ Param # ┃ ┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩ │ lstm_12 (LSTM) │ (None, 60, 64) │ 16,896 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dropout_40 (Dropout) │ (None, 60, 64) │ 0 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ lstm_13 (LSTM) │ (None, 60, 64) │ 33,024 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dropout_41 (Dropout) │ (None, 60, 64) │ 0 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ lstm_14 (LSTM) │ (None, 64) │ 33,024 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dropout_42 (Dropout) │ (None, 64) │ 0 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dense_16 (Dense) │ (None, 1) │ 65 │ └─────────────────────────────────┴────────────────────────┴───────────────┘
Total params: 83,009 (324.25 KB)
Trainable params: 83,009 (324.25 KB)
Non-trainable params: 0 (0.00 B)
Model: "sequential_18"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓ ┃ Layer (type) ┃ Output Shape ┃ Param # ┃ ┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩ │ gru_28 (GRU) │ (None, 60, 64) │ 12,864 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dropout_43 (Dropout) │ (None, 60, 64) │ 0 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ gru_29 (GRU) │ (None, 60, 64) │ 24,960 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dropout_44 (Dropout) │ (None, 60, 64) │ 0 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ gru_30 (GRU) │ (None, 64) │ 24,960 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dropout_45 (Dropout) │ (None, 64) │ 0 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dense_17 (Dense) │ (None, 1) │ 65 │ └─────────────────────────────────┴────────────────────────┴───────────────┘
Total params: 62,849 (245.50 KB)
Trainable params: 62,849 (245.50 KB)
Non-trainable params: 0 (0.00 B)
/Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/bidirectional.py:107: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs)
Model: "sequential_19"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓ ┃ Layer (type) ┃ Output Shape ┃ Param # ┃ ┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩ │ bidirectional_12 │ (None, 60, 128) │ 33,792 │ │ (Bidirectional) │ │ │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dropout_46 (Dropout) │ (None, 60, 128) │ 0 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ bidirectional_13 │ (None, 60, 128) │ 98,816 │ │ (Bidirectional) │ │ │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dropout_47 (Dropout) │ (None, 60, 128) │ 0 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ bidirectional_14 │ (None, 128) │ 98,816 │ │ (Bidirectional) │ │ │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dropout_48 (Dropout) │ (None, 128) │ 0 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dense_18 (Dense) │ (None, 1) │ 129 │ └─────────────────────────────────┴────────────────────────┴───────────────┘
Total params: 231,553 (904.50 KB)
Trainable params: 231,553 (904.50 KB)
Non-trainable params: 0 (0.00 B)
Model: "sequential_20"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓ ┃ Layer (type) ┃ Output Shape ┃ Param # ┃ ┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩ │ bidirectional_15 │ (None, 60, 128) │ 25,728 │ │ (Bidirectional) │ │ │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dropout_49 (Dropout) │ (None, 60, 128) │ 0 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ bidirectional_16 │ (None, 60, 128) │ 74,496 │ │ (Bidirectional) │ │ │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dropout_50 (Dropout) │ (None, 60, 128) │ 0 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ bidirectional_17 │ (None, 128) │ 74,496 │ │ (Bidirectional) │ │ │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dropout_51 (Dropout) │ (None, 128) │ 0 │ ├─────────────────────────────────┼────────────────────────┼───────────────┤ │ dense_19 (Dense) │ (None, 1) │ 129 │ └─────────────────────────────────┴────────────────────────┴───────────────┘
Total params: 174,849 (683.00 KB)
Trainable params: 174,849 (683.00 KB)
Non-trainable params: 0 (0.00 B)
In [ ]:
import pandas as pd
import numpy as np
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import mean_squared_error, mean_absolute_error
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM, GRU, Dropout, Dense, Bidirectional
# Data preparation
vn_index = pd.read_excel('VN-Index-2018-2023.xlsx')
sab_stock_2018_2022 = pd.read_excel('SAB stock price 2018-2022.xlsx')
sab_stock_2023 = pd.read_excel('SAB stock price 2023.xlsx')
sab_stock = pd.concat([sab_stock_2018_2022, sab_stock_2023])
sab_stock.sort_values('Date', inplace=True)
sab_stock.reset_index(drop=True, inplace=True)
# Preprocess your data (example for VN-Index)
scaler_vn = MinMaxScaler(feature_range=(0, 1))
scaled_data_vn = scaler_vn.fit_transform(vn_index['Close'].values.reshape(-1, 1))
# Create sequences
def create_sequences(data, seq_length):
X = []
y = []
for i in range(len(data) - seq_length):
X.append(data[i:i+seq_length])
y.append(data[i+seq_length])
return np.array(X), np.array(y)
seq_length = 60
X_vn, y_vn = create_sequences(scaled_data_vn, seq_length)
# Split data into training and testing
split = int(0.8 * len(X_vn))
X_train_vn, X_test_vn = X_vn[:split], X_vn[split:]
y_train_vn, y_test_vn = y_vn[:split], y_vn[split:]
# Reshape input to be [samples, time steps, features]
X_train_vn = np.reshape(X_train_vn, (X_train_vn.shape[0], X_train_vn.shape[1], 1))
X_test_vn = np.reshape(X_test_vn, (X_test_vn.shape[0], X_test_vn.shape[1], 1))
# Define and train models
def build_and_train_model(model, X_train, y_train, epochs=10, batch_size=32):
model.compile(optimizer='adam', loss='mean_squared_error')
model.fit(X_train, y_train, epochs=epochs, batch_size=batch_size, verbose=0)
return model
# LSTM Model
lstm_model = Sequential()
lstm_model.add(LSTM(64, return_sequences=True, input_shape=(X_train_vn.shape[1], 1)))
lstm_model.add(Dropout(0.2))
lstm_model.add(LSTM(64, return_sequences=True))
lstm_model.add(Dropout(0.2))
lstm_model.add(LSTM(64))
lstm_model.add(Dropout(0.2))
lstm_model.add(Dense(1))
lstm_model = build_and_train_model(lstm_model, X_train_vn, y_train_vn)
# GRU Model
gru_model = Sequential()
gru_model.add(GRU(64, return_sequences=True, input_shape=(X_train_vn.shape[1], 1)))
gru_model.add(Dropout(0.2))
gru_model.add(GRU(64, return_sequences=True))
gru_model.add(Dropout(0.2))
gru_model.add(GRU(64))
gru_model.add(Dropout(0.2))
gru_model.add(Dense(1))
gru_model = build_and_train_model(gru_model, X_train_vn, y_train_vn)
# Bidirectional LSTM Model
bidirectional_lstm_model = Sequential()
bidirectional_lstm_model.add(Bidirectional(LSTM(64, return_sequences=True), input_shape=(X_train_vn.shape[1], 1)))
bidirectional_lstm_model.add(Dropout(0.2))
bidirectional_lstm_model.add(Bidirectional(LSTM(64, return_sequences=True)))
bidirectional_lstm_model.add(Dropout(0.2))
bidirectional_lstm_model.add(Bidirectional(LSTM(64)))
bidirectional_lstm_model.add(Dropout(0.2))
bidirectional_lstm_model.add(Dense(1))
bidirectional_lstm_model = build_and_train_model(bidirectional_lstm_model, X_train_vn, y_train_vn)
# Bidirectional GRU Model
bidirectional_gru_model = Sequential()
bidirectional_gru_model.add(Bidirectional(GRU(64, return_sequences=True), input_shape=(X_train_vn.shape[1], 1)))
bidirectional_gru_model.add(Dropout(0.2))
bidirectional_gru_model.add(Bidirectional(GRU(64, return_sequences=True)))
bidirectional_gru_model.add(Dropout(0.2))
bidirectional_gru_model.add(Bidirectional(GRU(64)))
bidirectional_gru_model.add(Dropout(0.2))
bidirectional_gru_model.add(Dense(1))
bidirectional_gru_model = build_and_train_model(bidirectional_gru_model, X_train_vn, y_train_vn)
# Make predictions
predicted_lstm_vn = lstm_model.predict(X_test_vn)
predicted_gru_vn = gru_model.predict(X_test_vn)
predicted_bi_lstm_vn = bidirectional_lstm_model.predict(X_test_vn)
predicted_bi_gru_vn = bidirectional_gru_model.predict(X_test_vn)
# Inverse transform predictions to original scale
predicted_lstm_vn = scaler_vn.inverse_transform(predicted_lstm_vn)
predicted_gru_vn = scaler_vn.inverse_transform(predicted_gru_vn)
predicted_bi_lstm_vn = scaler_vn.inverse_transform(predicted_bi_lstm_vn)
predicted_bi_gru_vn = scaler_vn.inverse_transform(predicted_bi_gru_vn)
y_test_vn = scaler_vn.inverse_transform(y_test_vn.reshape(-1, 1))
# Calculate metrics for VN-INDEX
mse_lstm_vn = mean_squared_error(y_test_vn, predicted_lstm_vn)
mse_gru_vn = mean_squared_error(y_test_vn, predicted_gru_vn)
mse_bi_lstm_vn = mean_squared_error(y_test_vn, predicted_bi_lstm_vn)
mse_bi_gru_vn = mean_squared_error(y_test_vn, predicted_bi_gru_vn)
rmse_lstm_vn = np.sqrt(mse_lstm_vn)
rmse_gru_vn = np.sqrt(mse_gru_vn)
rmse_bi_lstm_vn = np.sqrt(mse_bi_lstm_vn)
rmse_bi_gru_vn = np.sqrt(mse_bi_gru_vn)
mae_lstm_vn = mean_absolute_error(y_test_vn, predicted_lstm_vn)
mae_gru_vn = mean_absolute_error(y_test_vn, predicted_gru_vn)
mae_bi_lstm_vn = mean_absolute_error(y_test_vn, predicted_bi_lstm_vn)
mae_bi_gru_vn = mean_absolute_error(y_test_vn, predicted_bi_gru_vn)
# Print the results
print(f"MSE (VN-INDEX) - LSTM: {mse_lstm_vn}, GRU: {mse_gru_vn}, Bidirectional LSTM: {mse_bi_lstm_vn}, Bidirectional GRU: {mse_bi_gru_vn}")
print(f"RMSE (VN-INDEX) - LSTM: {rmse_lstm_vn}, GRU: {rmse_gru_vn}, Bidirectional LSTM: {rmse_bi_lstm_vn}, Bidirectional GRU: {rmse_bi_gru_vn}")
print(f"MAE (VN-INDEX) - LSTM: {mae_lstm_vn}, GRU: {mae_gru_vn}, Bidirectional LSTM: {mae_bi_lstm_vn}, Bidirectional GRU: {mae_bi_gru_vn}")
/Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/rnn.py:204: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs) /Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/bidirectional.py:107: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs)
8/8 ━━━━━━━━━━━━━━━━━━━━ 0s 33ms/step 8/8 ━━━━━━━━━━━━━━━━━━━━ 0s 37ms/step 8/8 ━━━━━━━━━━━━━━━━━━━━ 1s 63ms/step 8/8 ━━━━━━━━━━━━━━━━━━━━ 1s 68ms/step MSE (VN-INDEX) - LSTM: 2297.6451185636, GRU: 1630.0795106892315, Bidirectional LSTM: 1513.6277839919858, Bidirectional GRU: 1585.821065794887 RMSE (VN-INDEX) - LSTM: 47.93375760947185, GRU: 40.374243159336515, Bidirectional LSTM: 38.90536960359053, Bidirectional GRU: 39.82236891239504 MAE (VN-INDEX) - LSTM: 37.93414596942293, GRU: 30.593597668559624, Bidirectional LSTM: 31.954895676043865, Bidirectional GRU: 31.41632623752627
In [ ]:
import pandas as pd
import numpy as np
from sklearn.preprocessing import MinMaxScaler
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM, GRU, Dropout, Dense, Bidirectional
from sklearn.metrics import mean_squared_error, mean_absolute_error
# Data preparation for SAB
sab_stock_2018_2022 = pd.read_excel('SAB stock price 2018-2022.xlsx')
sab_stock_2023 = pd.read_excel('SAB stock price 2023.xlsx')
sab_stock = pd.concat([sab_stock_2018_2022, sab_stock_2023])
sab_stock.sort_values('Date', inplace=True)
sab_stock.reset_index(drop=True, inplace=True)
# Preprocess your data (example for SAB)
scaler_sab = MinMaxScaler(feature_range=(0, 1))
scaled_data_sab = scaler_sab.fit_transform(sab_stock['Close'].values.reshape(-1, 1))
# Create sequences
seq_length = 60
X_sab, y_sab = create_sequences(scaled_data_sab, seq_length)
# Split data into training and testing
split = int(0.8 * len(X_sab))
X_train_sab, X_test_sab = X_sab[:split], X_sab[split:]
y_train_sab, y_test_sab = y_sab[:split], y_sab[split:]
# Define a function to train and evaluate models
def train_and_evaluate_model(model, X_train, y_train, X_test, y_test, scaler):
model.compile(optimizer='adam', loss='mean_squared_error')
model.fit(X_train, y_train, epochs=100, batch_size=32, verbose=0)
predictions = model.predict(X_test)
predictions = scaler.inverse_transform(predictions)
y_test = scaler.inverse_transform(y_test.reshape(-1, 1))
mse = mean_squared_error(y_test, predictions)
rmse = np.sqrt(mse)
mae = mean_absolute_error(y_test, predictions)
return mse, rmse, mae
# Train and evaluate LSTM
lstm_model.compile(optimizer='adam', loss='mean_squared_error')
lstm_model.fit(X_train_sab, y_train_sab, epochs=100, batch_size=32, verbose=0)
lstm_predictions_sab = lstm_model.predict(X_test_sab)
lstm_predictions_sab = scaler_sab.inverse_transform(lstm_predictions_sab)
y_test_sab = scaler_sab.inverse_transform(y_test_sab.reshape(-1, 1))
mse_lstm_sab = mean_squared_error(y_test_sab, lstm_predictions_sab)
rmse_lstm_sab = np.sqrt(mse_lstm_sab)
mae_lstm_sab = mean_absolute_error(y_test_sab, lstm_predictions_sab)
# Train and evaluate GRU
gru_model.compile(optimizer='adam', loss='mean_squared_error')
gru_model.fit(X_train_sab, y_train_sab, epochs=100, batch_size=32, verbose=0)
gru_predictions_sab = gru_model.predict(X_test_sab)
gru_predictions_sab = scaler_sab.inverse_transform(gru_predictions_sab)
mse_gru_sab = mean_squared_error(y_test_sab, gru_predictions_sab)
rmse_gru_sab = np.sqrt(mse_gru_sab)
mae_gru_sab = mean_absolute_error(y_test_sab, gru_predictions_sab)
# Train and evaluate Bidirectional LSTM
bidirectional_lstm_model.compile(optimizer='adam', loss='mean_squared_error')
bidirectional_lstm_model.fit(X_train_sab, y_train_sab, epochs=100, batch_size=32, verbose=0)
bi_lstm_predictions_sab = bidirectional_lstm_model.predict(X_test_sab)
bi_lstm_predictions_sab = scaler_sab.inverse_transform(bi_lstm_predictions_sab)
mse_bi_lstm_sab = mean_squared_error(y_test_sab, bi_lstm_predictions_sab)
rmse_bi_lstm_sab = np.sqrt(mse_bi_lstm_sab)
mae_bi_lstm_sab = mean_absolute_error(y_test_sab, bi_lstm_predictions_sab)
# Train and evaluate Bidirectional GRU
bidirectional_gru_model.compile(optimizer='adam', loss='mean_squared_error')
bidirectional_gru_model.fit(X_train_sab, y_train_sab, epochs=100, batch_size=32, verbose=0)
bi_gru_predictions_sab = bidirectional_gru_model.predict(X_test_sab)
bi_gru_predictions_sab = scaler_sab.inverse_transform(bi_gru_predictions_sab)
mse_bi_gru_sab = mean_squared_error(y_test_sab, bi_gru_predictions_sab)
rmse_bi_gru_sab = np.sqrt(mse_bi_gru_sab)
mae_bi_gru_sab = mean_absolute_error(y_test_sab, bi_gru_predictions_sab)
# Print results
print(f'MSE (SAB) - LSTM: {mse_lstm_sab}')
print(f'RMSE (SAB) - LSTM: {rmse_lstm_sab}')
print(f'MAE (SAB) - LSTM: {mae_lstm_sab}')
print(f'MSE (SAB) - GRU: {mse_gru_sab}')
print(f'RMSE (SAB) - GRU: {rmse_gru_sab}')
print(f'MAE (SAB) - GRU: {mae_gru_sab}')
print(f'MSE (SAB) - Bidirectional LSTM: {mse_bi_lstm_sab}')
print(f'RMSE (SAB) - Bidirectional LSTM: {rmse_bi_lstm_sab}')
print(f'MAE (SAB) - Bidirectional LSTM: {mae_bi_lstm_sab}')
print(f'MSE (SAB) - Bidirectional GRU: {mse_bi_gru_sab}')
print(f'RMSE (SAB) - Bidirectional GRU: {rmse_bi_gru_sab}')
print(f'MAE (SAB) - Bidirectional GRU: {mae_bi_gru_sab}')
9/9 ━━━━━━━━━━━━━━━━━━━━ 0s 10ms/step 9/9 ━━━━━━━━━━━━━━━━━━━━ 0s 10ms/step 9/9 ━━━━━━━━━━━━━━━━━━━━ 1s 17ms/step 9/9 ━━━━━━━━━━━━━━━━━━━━ 1s 15ms/step MSE (SAB) - LSTM: 2.7287297085681548 RMSE (SAB) - LSTM: 1.6518867117838787 MAE (SAB) - LSTM: 1.2240811056560943 MSE (SAB) - GRU: 1.7504593543857194 RMSE (SAB) - GRU: 1.3230492637788358 MAE (SAB) - GRU: 0.9980523029963179 MSE (SAB) - Bidirectional LSTM: 5.971884336710377 RMSE (SAB) - Bidirectional LSTM: 2.4437439179894396 MAE (SAB) - Bidirectional LSTM: 2.147823672824436 MSE (SAB) - Bidirectional GRU: 4.132904875009467 RMSE (SAB) - Bidirectional GRU: 2.032954715435016 MAE (SAB) - Bidirectional GRU: 1.7328200711144341
In [ ]:
import numpy as np
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import mean_squared_error
import matplotlib.pyplot as plt
from keras.models import Sequential
from keras.layers import LSTM, GRU, Bidirectional, Dense, Dropout
# Assuming 'scaled_data_vn' is your scaled VN-Index data
# Load your data here if it's not loaded
# Create sequences function
def create_sequences(data, seq_length):
X = []
y = []
for i in range(len(data) - seq_length):
X.append(data[i:i+seq_length])
y.append(data[i+seq_length])
return np.array(X), np.array(y)
seq_length = 60
X_vn, y_vn = create_sequences(scaled_data_vn, seq_length)
# Split data into training and testing
split = int(0.8 * len(X_vn))
X_train_vn, X_test_vn = X_vn[:split], X_vn[split:]
y_train_vn, y_test_vn = y_vn[:split], y_vn[split:]
# Reshape data for model compatibility
X_train_vn = X_train_vn.reshape((X_train_vn.shape[0], X_train_vn.shape[1], 1))
X_test_vn = X_test_vn.reshape((X_test_vn.shape[0], X_test_vn.shape[1], 1))
# Define, compile, and train models
def compile_and_train(model, X_train, y_train, X_test, y_test):
model.compile(optimizer='adam', loss='mean_squared_error')
model.fit(X_train, y_train, epochs=100, batch_size=32, validation_data=(X_test, y_test), verbose=0)
return model
# LSTM Model
lstm_model = Sequential([
LSTM(64, return_sequences=True, input_shape=(X_train_vn.shape[1], 1)),
Dropout(0.2),
LSTM(64, return_sequences=True),
Dropout(0.2),
LSTM(64),
Dropout(0.2),
Dense(1)
])
lstm_model = compile_and_train(lstm_model, X_train_vn, y_train_vn, X_test_vn, y_test_vn)
# GRU Model
gru_model = Sequential([
GRU(64, return_sequences=True, input_shape=(X_train_vn.shape[1], 1)),
Dropout(0.2),
GRU(64, return_sequences=True),
Dropout(0.2),
GRU(64),
Dropout(0.2),
Dense(1)
])
gru_model = compile_and_train(gru_model, X_train_vn, y_train_vn, X_test_vn, y_test_vn)
# Bidirectional LSTM Model
bidirectional_lstm_model = Sequential([
Bidirectional(LSTM(64, return_sequences=True), input_shape=(X_train_vn.shape[1], 1)),
Dropout(0.2),
Bidirectional(LSTM(64, return_sequences=True)),
Dropout(0.2),
Bidirectional(LSTM(64)),
Dropout(0.2),
Dense(1)
])
bidirectional_lstm_model = compile_and_train(bidirectional_lstm_model, X_train_vn, y_train_vn, X_test_vn, y_test_vn)
# Bidirectional GRU Model
bidirectional_gru_model = Sequential([
Bidirectional(GRU(64, return_sequences=True), input_shape=(X_train_vn.shape[1], 1)),
Dropout(0.2),
Bidirectional(GRU(64, return_sequences=True)),
Dropout(0.2),
Bidirectional(GRU(64)),
Dropout(0.2),
Dense(1)
])
bidirectional_gru_model = compile_and_train(bidirectional_gru_model, X_train_vn, y_train_vn, X_test_vn, y_test_vn)
# Make predictions
y_pred_lstm = lstm_model.predict(X_test_vn)
y_pred_gru = gru_model.predict(X_test_vn)
y_pred_bidirectional_lstm = bidirectional_lstm_model.predict(X_test_vn)
y_pred_bidirectional_gru = bidirectional_gru_model.predict(X_test_vn)
# Calculate RMSE
rmse_lstm = np.sqrt(mean_squared_error(y_test_vn, y_pred_lstm))
rmse_gru = np.sqrt(mean_squared_error(y_test_vn, y_pred_gru))
rmse_bidirectional_lstm = np.sqrt(mean_squared_error(y_test_vn, y_pred_bidirectional_lstm))
rmse_bidirectional_gru = np.sqrt(mean_squared_error(y_test_vn, y_pred_bidirectional_gru))
print(f"RMSE for LSTM: {rmse_lstm}")
print(f"RMSE for GRU: {rmse_gru}")
print(f"RMSE for Bidirectional LSTM: {rmse_bidirectional_lstm}")
print(f"RMSE for Bidirectional GRU: {rmse_bidirectional_gru}")
/Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/rnn.py:204: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs) /Users/collide0412/grad_prj/.conda/lib/python3.12/site-packages/keras/src/layers/rnn/bidirectional.py:107: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(**kwargs)
8/8 ━━━━━━━━━━━━━━━━━━━━ 0s 38ms/step 8/8 ━━━━━━━━━━━━━━━━━━━━ 1s 40ms/step WARNING:tensorflow:5 out of the last 17 calls to <function TensorFlowTrainer.make_predict_function.<locals>.one_step_on_data_distributed at 0x3c085a980> triggered tf.function retracing. Tracing is expensive and the excessive number of tracings could be due to (1) creating @tf.function repeatedly in a loop, (2) passing tensors with different shapes, (3) passing Python objects instead of tensors. For (1), please define your @tf.function outside of the loop. For (2), @tf.function has reduce_retracing=True option that can avoid unnecessary retracing. For (3), please refer to https://www.tensorflow.org/guide/function#controlling_retracing and https://www.tensorflow.org/api_docs/python/tf/function for more details. 8/8 ━━━━━━━━━━━━━━━━━━━━ 1s 68ms/step WARNING:tensorflow:5 out of the last 17 calls to <function TensorFlowTrainer.make_predict_function.<locals>.one_step_on_data_distributed at 0x3bac57420> triggered tf.function retracing. Tracing is expensive and the excessive number of tracings could be due to (1) creating @tf.function repeatedly in a loop, (2) passing tensors with different shapes, (3) passing Python objects instead of tensors. For (1), please define your @tf.function outside of the loop. For (2), @tf.function has reduce_retracing=True option that can avoid unnecessary retracing. For (3), please refer to https://www.tensorflow.org/guide/function#controlling_retracing and https://www.tensorflow.org/api_docs/python/tf/function for more details. 8/8 ━━━━━━━━━━━━━━━━━━━━ 1s 71ms/step RMSE for LSTM: 0.029255666038503686 RMSE for GRU: 0.0239900393190402 RMSE for Bidirectional LSTM: 0.02852745105672756 RMSE for Bidirectional GRU: 0.039969230994851716
In [ ]:
import matplotlib.pyplot as plt
import networkx as nx
# Create a directed graph
G = nx.DiGraph()
# Add nodes and edges for the data preprocessing steps
steps = [
"Data\nCollection",
"Data\nCleaning",
"Handle\nMissing\nValues",
"Encoding\nCategorical\nVariables",
"Normalization\nand\nStandardization",
"Data Ready\nfor Model"
]
edges = [
(steps[0], steps[1]),
(steps[1], steps[2]),
(steps[2], steps[3]),
(steps[3], steps[4]),
(steps[4], steps[5])
]
G.add_edges_from(edges)
# Define node positions
pos = {
steps[0]: (0, 5),
steps[1]: (0, 4),
steps[2]: (0, 3),
steps[3]: (0, 2),
steps[4]: (0, 1),
steps[5]: (0, 0)
}
# Draw the nodes and edges
plt.figure(figsize=(12, 12))
nx.draw_networkx_edges(G, pos, edgelist=edges, arrowstyle='-|>', arrowsize=20, edge_color='black')
# Draw nodes with text inside the circles
nx.draw_networkx_nodes(G, pos, node_size=7000, node_color="skyblue", edgecolors='black')
# Draw labels for all nodes
for node, (x, y) in pos.items():
font_size = 10
if node == "Normalization\nand\nStandardization":
font_size = 8 # Smaller font size for the long text
plt.text(x, y, node, ha='center', va='center', fontsize=font_size, fontweight='bold')
# # Add title
# plt.title("Figure 1: Data Preprocessing Overview", fontsize=15)
# plt.axis('off') # Hide the axes
# plt.show()
In [ ]:
import matplotlib.pyplot as plt
import matplotlib.patches as patches
fig, ax = plt.subplots(figsize=(10, 6))
# Define the model components with smaller bounding box size and more spacing
components = [
("Input Layer", (0, 6)),
("LSTM Layer 1", (3, 6)),
("LSTM Layer 2", (6, 6)),
("Dense Layer", (9, 6)),
("Output Layer", (12, 6)),
("Input Layer", (0, 3)),
("GRU Layer 1", (3, 3)),
("GRU Layer 2", (6, 3)),
("Dense Layer", (9, 3)),
("Output Layer", (12, 3))
]
# Draw rectangles for each component with smaller size and more spacing
for label, (x, y) in components:
rect = patches.FancyBboxPatch((x, y), 2, 1, boxstyle="round,pad=0.3", edgecolor='black', facecolor='skyblue')
ax.add_patch(rect)
plt.text(x + 1, y + 0.5, label, ha='center', va='center', fontsize=10, fontweight='bold')
# Add arrows between components
arrow_length = 0.75 # Adjusted arrow length
for i in range(4):
start = components[i][1]
end = components[i + 1][1]
ax.arrow(start[0] + 2.25, start[1] + 0.5, arrow_length, 0, head_width=0.2, head_length=0.2, fc='black', ec='black', length_includes_head=True)
start = components[i + 5][1]
end = components[i + 6][1]
ax.arrow(start[0] + 2.25, start[1] + 0.5, arrow_length, 0, head_width=0.2, head_length=0.2, fc='black', ec='black', length_includes_head=True)
# Set the limits and hide the axes
ax.set_xlim(-1, 14)
ax.set_ylim(2, 8)
ax.axis('off')
# # Add the title
# plt.title("Figure 2: Model Architecture of LSTM and GRU", fontsize=15)
# Center the figure
fig.tight_layout()
# Show the plot
plt.show()
In [ ]:
import pandas as pd
# Load the datasets
beverage_data = pd.read_excel('Beverage manufacturing data.xlsx')
ma30_data = pd.read_excel('MA30.xlsx')
sab_data = pd.read_excel('SAB data.xlsx')
vn_index_2023 = pd.read_excel('VN-index 2023.xlsx')
vn_index_2018_2023 = pd.read_excel('VN-Index-2018-2023.xlsx')
sab_stock_2018_2022 = pd.read_excel('SAB stock price 2018-2022.xlsx')
sab_stock_2023 = pd.read_excel('SAB stock price 2023.xlsx')
# Display the first few rows of each dataset to understand the structure
print(beverage_data.head())
print(ma30_data.head())
print(sab_data.head())
print(vn_index_2023.head())
print(vn_index_2018_2023.head())
print(sab_stock_2018_2022.head())
print(sab_stock_2023.head())
# Extracting key financial indicators from the SAB data as an example
# This process needs to be adjusted based on your actual needs and data structure
# Example: Summarizing revenue and net income from the SAB data
key_indicators = {
"Indicator": [],
"Company": [],
"Year": [],
"Value": [],
"Category": [],
"Category 2": []
}
# Replace 'Indicator', 'Company', 'Year', 'Value', 'Category', and 'Category 2' with actual column names
for index, row in sab_data.iterrows():
key_indicators["Indicator"].append(row['Indicator'])
key_indicators["Company"].append(row['Company'])
key_indicators["Year"].append(row['Year'])
key_indicators["Value"].append(row['Value'])
key_indicators["Category"].append(row['Category'])
key_indicators["Category 2"].append(row['Category 2'])
# Convert to DataFrame
key_indicators_df = pd.DataFrame(key_indicators)
# Save the key indicators to a CSV file
key_indicators_df.to_csv('key_financial_indicators.csv', index=False)
# Display the extracted key financial indicators
print(key_indicators_df)
Indicator Company Year Value Category \
0 - Accumulated depreciation BHN 2020 -6.0 ASSETS
1 - Accumulated depreciation BHN 2021 -7.0 ASSETS
2 - Accumulated depreciation BHN 2022 -8.0 ASSETS
3 - Accumulated depreciation SAB 2020 -27.0 ASSETS
4 - Accumulated depreciation SAB 2021 -26.0 ASSETS
Category 2
0 B. LONG-TERM ASSETS
1 B. LONG-TERM ASSETS
2 B. LONG-TERM ASSETS
3 B. LONG-TERM ASSETS
4 B. LONG-TERM ASSETS
Date VN-INDEX SAB MA30 (VN-INDEX) MA30 (SAB)
0 2023-01-03 1043.90 84.50 NaN NaN
1 2023-01-04 1046.35 85.75 NaN NaN
2 2023-01-05 1055.82 87.10 NaN NaN
3 2023-01-06 1051.44 90.35 NaN NaN
4 2023-01-09 1054.21 89.80 NaN NaN
Indicator Unit Year SAB Category \
0 A. SHORT-TERM ASSETS NaN 2020 19513.0 ASSETS
1 A. SHORT-TERM ASSETS NaN 2021 22877.0 ASSETS
2 A. SHORT-TERM ASSETS NaN 2022 26860.0 ASSETS
3 I. Cash and cash equivalents NaN 2020 2726.0 ASSETS
4 I. Cash and cash equivalents NaN 2021 3606.0 ASSETS
Category 2
0 A. SHORT-TERM ASSETS
1 A. SHORT-TERM ASSETS
2 A. SHORT-TERM ASSETS
3 A. SHORT-TERM ASSETS
4 A. SHORT-TERM ASSETS
Date Close Open High Low Volume
0 2023-01-03 1043.90 1011.39 1043.90 1011.38 518145310
1 2023-01-04 1046.35 1048.31 1052.60 1041.56 566957000
2 2023-01-05 1055.82 1048.17 1058.14 1046.64 496500700
3 2023-01-06 1051.44 1055.51 1065.58 1047.26 614549000
4 2023-01-09 1054.21 1058.28 1062.12 1050.95 386345500
Date Close Open High Low Volume
0 2018-01-02 995.77 986.05 996.18 984.24 172887390
1 2018-01-03 1005.67 999.86 1010.21 995.77 212432620
2 2018-01-04 1019.75 1009.37 1019.75 1005.67 235169670
3 2018-01-05 1012.65 1020.34 1020.60 1010.65 265519370
4 2018-01-08 1022.90 1011.36 1022.90 1004.89 234755510
Date Close Open High Low Volume
0 2018-01-02 126.15 127.00 127.35 124.95 116190
1 2018-01-03 132.50 126.15 132.50 126.15 231500
2 2018-01-04 133.75 134.00 134.35 131.50 238800
3 2018-01-05 132.15 134.00 134.25 131.55 131870
4 2018-01-08 130.50 131.50 132.00 129.50 115690
Date Close Open High Low Volume
0 2023-01-03 84.50 85.55 86.75 84.10 121700
1 2023-01-04 85.75 84.60 87.00 84.50 116900
2 2023-01-05 87.10 86.00 91.75 85.75 255300
3 2023-01-06 90.35 88.60 91.30 87.10 130100
4 2023-01-09 89.80 89.75 91.00 87.90 46900
--------------------------------------------------------------------------- KeyError Traceback (most recent call last) File ~/grad_prj/.conda/lib/python3.12/site-packages/pandas/core/indexes/base.py:3805, in Index.get_loc(self, key) 3804 try: -> 3805 return self._engine.get_loc(casted_key) 3806 except KeyError as err: File index.pyx:167, in pandas._libs.index.IndexEngine.get_loc() File index.pyx:196, in pandas._libs.index.IndexEngine.get_loc() File pandas/_libs/hashtable_class_helper.pxi:7081, in pandas._libs.hashtable.PyObjectHashTable.get_item() File pandas/_libs/hashtable_class_helper.pxi:7089, in pandas._libs.hashtable.PyObjectHashTable.get_item() KeyError: 'Company' The above exception was the direct cause of the following exception: KeyError Traceback (most recent call last) Cell In[37], line 37 35 for index, row in sab_data.iterrows(): 36 key_indicators["Indicator"].append(row['Indicator']) ---> 37 key_indicators["Company"].append(row['Company']) 38 key_indicators["Year"].append(row['Year']) 39 key_indicators["Value"].append(row['Value']) File ~/grad_prj/.conda/lib/python3.12/site-packages/pandas/core/series.py:1121, in Series.__getitem__(self, key) 1118 return self._values[key] 1120 elif key_is_scalar: -> 1121 return self._get_value(key) 1123 # Convert generator to list before going through hashable part 1124 # (We will iterate through the generator there to check for slices) 1125 if is_iterator(key): File ~/grad_prj/.conda/lib/python3.12/site-packages/pandas/core/series.py:1237, in Series._get_value(self, label, takeable) 1234 return self._values[label] 1236 # Similar to Index.get_value, but we do not fall back to positional -> 1237 loc = self.index.get_loc(label) 1239 if is_integer(loc): 1240 return self._values[loc] File ~/grad_prj/.conda/lib/python3.12/site-packages/pandas/core/indexes/base.py:3812, in Index.get_loc(self, key) 3807 if isinstance(casted_key, slice) or ( 3808 isinstance(casted_key, abc.Iterable) 3809 and any(isinstance(x, slice) for x in casted_key) 3810 ): 3811 raise InvalidIndexError(key) -> 3812 raise KeyError(key) from err 3813 except TypeError: 3814 # If we have a listlike key, _check_indexing_error will raise 3815 # InvalidIndexError. Otherwise we fall through and re-raise 3816 # the TypeError. 3817 self._check_indexing_error(key) KeyError: 'Company'
In [ ]:
import pandas as pd
# Load the datasets
beverage_data = pd.read_excel('Beverage manufacturing data.xlsx')
ma30_data = pd.read_excel('MA30.xlsx')
sab_data = pd.read_excel('SAB data.xlsx')
vn_index_2023 = pd.read_excel('VN-index 2023.xlsx')
vn_index_2018_2023 = pd.read_excel('VN-Index-2018-2023.xlsx')
sab_stock_2018_2022 = pd.read_excel('SAB stock price 2018-2022.xlsx')
sab_stock_2023 = pd.read_excel('SAB stock price 2023.xlsx')
# Display the column names of each dataset
print("Beverage manufacturing data columns:", beverage_data.columns)
print("MA30 data columns:", ma30_data.columns)
print("SAB data columns:", sab_data.columns)
print("VN-index 2023 columns:", vn_index_2023.columns)
print("VN-Index-2018-2023 columns:", vn_index_2018_2023.columns)
print("SAB stock price 2018-2022 columns:", sab_stock_2018_2022.columns)
print("SAB stock price 2023 columns:", sab_stock_2023.columns)
Beverage manufacturing data columns: Index(['Indicator', 'Company', 'Year', 'Value', 'Category', 'Category 2'], dtype='object') MA30 data columns: Index(['Date', 'VN-INDEX', 'SAB', 'MA30 (VN-INDEX)', 'MA30 (SAB)'], dtype='object') SAB data columns: Index(['Indicator', 'Unit', 'Year', 'SAB', 'Category', 'Category 2'], dtype='object') VN-index 2023 columns: Index(['Date', 'Close', 'Open', 'High', 'Low', 'Volume'], dtype='object') VN-Index-2018-2023 columns: Index(['Date', 'Close', 'Open', 'High', 'Low', 'Volume'], dtype='object') SAB stock price 2018-2022 columns: Index(['Date', 'Close', 'Open', 'High', 'Low', 'Volume'], dtype='object') SAB stock price 2023 columns: Index(['Date', 'Close', 'Open', 'High', 'Low', 'Volume'], dtype='object')
In [ ]:
# Extract key financial indicators from the beverage data
key_indicators_beverage = beverage_data[['Indicator', 'Company', 'Year', 'Value', 'Category', 'Category 2']]
# Extract key financial indicators from the SAB data (adjusted column names)
key_indicators_sab = sab_data[['Indicator', 'Year', 'SAB', 'Category', 'Category 2']]
key_indicators_sab.columns = ['Indicator', 'Year', 'Value', 'Category', 'Category 2']
key_indicators_sab['Company'] = 'SAB'
# Combine the extracted key financial indicators
key_indicators = pd.concat([key_indicators_beverage, key_indicators_sab])
# Save the key indicators to a CSV file
key_indicators.to_csv('key_financial_indicators.csv', index=False)
# Display the extracted key financial indicators
print(key_indicators)
Indicator Company Year Value Category \
0 - Accumulated depreciation BHN 2020 -6.00 ASSETS
1 - Accumulated depreciation BHN 2021 -7.00 ASSETS
2 - Accumulated depreciation BHN 2022 -8.00 ASSETS
3 - Accumulated depreciation SAB 2020 -27.00 ASSETS
4 - Accumulated depreciation SAB 2021 -26.00 ASSETS
.. ... ... ... ... ...
591 Intangible fixed assets/Fixed assets SAB 2021 21.20 Ratios
592 Intangible fixed assets/Fixed assets SAB 2022 20.73 Ratios
593 Construction in progress/Fixed assets SAB 2020 0.58 Ratios
594 Construction in progress/Fixed assets SAB 2021 12.53 Ratios
595 Construction in progress/Fixed assets SAB 2022 3.00 Ratios
Category 2
0 B. LONG-TERM ASSETS
1 B. LONG-TERM ASSETS
2 B. LONG-TERM ASSETS
3 B. LONG-TERM ASSETS
4 B. LONG-TERM ASSETS
.. ...
591 Long-term asset structure
592 Long-term asset structure
593 Long-term asset structure
594 Long-term asset structure
595 Long-term asset structure
[5154 rows x 6 columns]
In [ ]:
# Extract key financial indicators from the beverage data
key_indicators_beverage = beverage_data[['Indicator', 'Company', 'Year', 'Value', 'Category', 'Category 2']]
# Extract key financial indicators from the SAB data (adjusted column names)
key_indicators_sab = sab_data[['Indicator', 'Year', 'SAB', 'Category', 'Category 2']]
key_indicators_sab.columns = ['Indicator', 'Year', 'Value', 'Category', 'Category 2']
key_indicators_sab['Company'] = 'SAB'
# Combine the extracted key financial indicators
key_indicators = pd.concat([key_indicators_beverage, key_indicators_sab])
# Define the key indicators
main_indicators = [
'Total assets', 'Total liabilities', 'Net profit after tax',
'Gross profit', 'Revenue', 'Operating profit'
]
# Filter the dataset for the main indicators
filtered_indicators = key_indicators[key_indicators['Indicator'].isin(main_indicators)]
# Create the summary table
summary_table = filtered_indicators.pivot_table(index=['Company', 'Year'], columns='Indicator', values='Value', aggfunc='sum').reset_index()
# Save the summary table to a CSV file
summary_table.to_csv('key_financial_indicators_summary.csv', index=False)
# Display the summary table
print(summary_table)
Indicator Company Year Gross profit Total assets 0 BHN 2020 -19.44 -1.13 1 BHN 2021 -14.31 -7.76 2 BHN 2022 36.35 2.05 3 HAD 2020 -1.30 -8.75 4 HAD 2021 -26.06 -3.31 5 HAD 2022 46.09 5.19 6 HAT 2020 -20.08 5.38 7 HAT 2021 -49.50 -20.46 8 HAT 2022 252.21 36.53 9 SAB 2020 -21.98 3.06 10 SAB 2021 -21.00 22.74 11 SAB 2022 83.12 26.10 12 SCD 2020 -42.98 65.53 13 SCD 2021 -49.83 -9.37 14 SCD 2022 58.54 60.95 15 SMB 2020 -7.71 0.83 16 SMB 2021 1.24 16.54 17 SMB 2022 18.44 4.42 18 THB 2020 6.23 2.63 19 THB 2021 -11.07 1.80 20 THB 2022 -8.27 1.74 21 VCF 2020 2.99 -4.20 22 VCF 2021 -37.61 -8.73 23 VCF 2022 -11.10 8.26 24 VDL 2020 -22.26 -16.80 25 VDL 2021 -33.27 6.88 26 VDL 2022 -67.96 -10.54
In [ ]:
import pandas as pd
# Data
data = {
"Company": ["BHN", "BHN", "BHN", "HAD", "HAD", "HAD", "HAT", "HAT", "HAT", "SAB", "SAB", "SAB",
"SCD", "SCD", "SCD", "SMB", "SMB", "SMB", "THB", "THB", "THB", "VCF", "VCF", "VCF",
"VDL", "VDL", "VDL"],
"Year": [2020, 2021, 2022, 2020, 2021, 2022, 2020, 2021, 2022, 2020, 2021, 2022,
2020, 2021, 2022, 2020, 2021, 2022, 2020, 2021, 2022, 2020, 2021, 2022,
2020, 2021, 2022],
"Gross Profit": [-19.44, -14.31, 36.35, -1.30, -26.06, 46.09, -20.08, -49.50, 252.21, -21.98, -21.00, 83.12,
-42.98, -49.83, 58.54, -7.71, 1.24, 18.44, 6.23, -11.07, -8.27, 2.99, -37.61, -11.10,
-22.26, -33.27, -67.96],
"Total Assets": [-1.13, -7.76, 2.05, -8.75, -3.31, 5.19, 5.38, -20.46, 36.53, 3.06, 22.74, 26.10,
65.53, -9.37, 60.95, 0.83, 16.54, 4.42, 2.63, 1.80, 1.74, -4.20, -8.73, 8.26,
-16.80, 6.88, -10.54]
}
# Creating DataFrame
df = pd.DataFrame(data)
# Calculating the average for each company
average_df = df.groupby('Company').mean().reset_index()
# Displaying the result
print(average_df)
Company Year Gross Profit Total Assets 0 BHN 2021.0 0.866667 -2.280000 1 HAD 2021.0 6.243333 -2.290000 2 HAT 2021.0 60.876667 7.150000 3 SAB 2021.0 13.380000 17.300000 4 SCD 2021.0 -11.423333 39.036667 5 SMB 2021.0 3.990000 7.263333 6 THB 2021.0 -4.370000 2.056667 7 VCF 2021.0 -15.240000 -1.556667 8 VDL 2021.0 -41.163333 -6.820000
In [ ]:
import pandas as pd
# Data
data = {
'Company': [
'BHN', 'BHN', 'BHN', 'HAD', 'HAD', 'HAD',
'HAT', 'HAT', 'HAT', 'SAB', 'SAB', 'SAB',
'SCD', 'SCD', 'SCD', 'SMB', 'SMB', 'SMB',
'THB', 'THB', 'THB', 'VCF', 'VCF', 'VCF',
'VDL', 'VDL', 'VDL'
],
'Year': [
2020, 2021, 2022, 2020, 2021, 2022,
2020, 2021, 2022, 2020, 2021, 2022,
2020, 2021, 2022, 2020, 2021, 2022,
2020, 2021, 2022, 2020, 2021, 2022,
2020, 2021, 2022
],
'Gross Profit': [
-19.44, -14.31, 36.35, -1.30, -26.06, 46.09,
-20.08, -49.50, 252.21, -21.98, -21.00, 83.12,
-42.98, -49.83, 58.54, -7.71, 1.24, 18.44,
6.23, -11.07, -8.27, 2.99, -37.61, -11.10,
-22.26, -33.27, -67.96
],
'Total Assets': [
-1.13, -7.76, 2.05, -8.75, -3.31, 5.19,
5.38, -20.46, 36.53, 3.06, 22.74, 26.10,
65.53, -9.37, 60.95, 0.83, 16.54, 4.42,
2.63, 1.80, 1.74, -4.20, -8.73, 8.26,
-16.80, 6.88, -10.54
]
}
# Creating DataFrame
df = pd.DataFrame(data)
# Calculating the average values for Gross Profit and Total Assets over the three years (2020-2022) for each company
average_data = df.groupby('Company').mean().reset_index()
# Displaying the resulting DataFrame
print(average_data)
# Exporting the DataFrame to a CSV file
average_data.to_csv('Average_Financial_Indicators.csv', index=False)
Company Year Gross Profit Total Assets 0 BHN 2021.0 0.866667 -2.280000 1 HAD 2021.0 6.243333 -2.290000 2 HAT 2021.0 60.876667 7.150000 3 SAB 2021.0 13.380000 17.300000 4 SCD 2021.0 -11.423333 39.036667 5 SMB 2021.0 3.990000 7.263333 6 THB 2021.0 -4.370000 2.056667 7 VCF 2021.0 -15.240000 -1.556667 8 VDL 2021.0 -41.163333 -6.820000
In [ ]:
import pandas as pd
# Data
data = {
'Company': [
'BHN', 'BHN', 'BHN', 'HAD', 'HAD', 'HAD',
'HAT', 'HAT', 'HAT', 'SAB', 'SAB', 'SAB',
'SCD', 'SCD', 'SCD', 'SMB', 'SMB', 'SMB',
'THB', 'THB', 'THB', 'VCF', 'VCF', 'VCF',
'VDL', 'VDL', 'VDL'
],
'Year': [
2020, 2021, 2022, 2020, 2021, 2022,
2020, 2021, 2022, 2020, 2021, 2022,
2020, 2021, 2022, 2020, 2021, 2022,
2020, 2021, 2022, 2020, 2021, 2022,
2020, 2021, 2022
],
'Gross Profit': [
-19.44, -14.31, 36.35, -1.30, -26.06, 46.09,
-20.08, -49.50, 252.21, -21.98, -21.00, 83.12,
-42.98, -49.83, 58.54, -7.71, 1.24, 18.44,
6.23, -11.07, -8.27, 2.99, -37.61, -11.10,
-22.26, -33.27, -67.96
],
'Total Assets': [
-1.13, -7.76, 2.05, -8.75, -3.31, 5.19,
5.38, -20.46, 36.53, 3.06, 22.74, 26.10,
65.53, -9.37, 60.95, 0.83, 16.54, 4.42,
2.63, 1.80, 1.74, -4.20, -8.73, 8.26,
-16.80, 6.88, -10.54
]
}
# Creating DataFrame
df = pd.DataFrame(data)
# Calculating the average values for Gross Profit and Total Assets over the three years (2020-2022) for each company
average_data = df.groupby('Company').agg({'Gross Profit': 'mean', 'Total Assets': 'mean'}).reset_index()
# Displaying the resulting DataFrame
print(average_data)
# Exporting the DataFrame to a CSV file
average_data.to_csv('Average_Financial_Indicators.csv', index=False)
Company Gross Profit Total Assets 0 BHN 0.866667 -2.280000 1 HAD 6.243333 -2.290000 2 HAT 60.876667 7.150000 3 SAB 13.380000 17.300000 4 SCD -11.423333 39.036667 5 SMB 3.990000 7.263333 6 THB -4.370000 2.056667 7 VCF -15.240000 -1.556667 8 VDL -41.163333 -6.820000
In [ ]:
# Print the column names to verify the correct ones
print(beverage_data.columns)
# Assuming 'Indicator' represents Gross Profit and Total Assets, we filter those rows
key_indicators = beverage_data[(beverage_data['Indicator'].str.contains('Gross profit')) | (beverage_data['Indicator'].str.contains('Total assets'))]
# Pivot the table to get columns for Gross Profit and Total Assets
pivot_table = key_indicators.pivot_table(index=['Company', 'Year'], columns='Indicator', values='Value').reset_index()
# Calculate descriptive statistics
descriptive_stats = pivot_table.describe()
# Display the table
descriptive_stats
Index(['Indicator', 'Company', 'Year', 'Value', 'Category', 'Category 2'], dtype='object')
Out[ ]:
| Indicator | Year | 5. Gross profit | Fixed assets/Total assets | Gross profit | Gross profit margin | Long-term assets/Total assets | Short-term assets/Total assets | Total assets |
|---|---|---|---|---|---|---|---|---|
| count | 27.00000 | 27.000000 | 27.000000 | 27.000000 | 27.000000 | 27.000000 | 27.000000 | 27.000000 |
| mean | 2021.00000 | 1360.333333 | 25.352593 | 0.718889 | 19.847037 | 36.005926 | 63.994074 | 5.690000 |
| std | 0.83205 | 2842.998147 | 10.964164 | 58.596533 | 9.117338 | 16.194499 | 16.194499 | 19.996656 |
| min | 2020.00000 | 6.000000 | 9.340000 | -67.960000 | 3.160000 | 15.950000 | 17.020000 | -20.460000 |
| 25% | 2020.00000 | 35.000000 | 16.555000 | -24.160000 | 11.455000 | 24.500000 | 57.935000 | -5.980000 |
| 50% | 2021.00000 | 171.000000 | 22.010000 | -10.990000 | 24.350000 | 31.760000 | 68.240000 | 1.800000 |
| 75% | 2022.00000 | 722.500000 | 32.315000 | 4.610000 | 26.730000 | 42.065000 | 75.500000 | 7.570000 |
| max | 2022.00000 | 10771.000000 | 47.520000 | 252.210000 | 30.790000 | 82.980000 | 84.050000 | 65.530000 |
In [ ]:
import pandas as pd
# Assuming 'beverage_data' is the dataframe containing your dataset
# Filter the necessary columns
filtered_data = beverage_data[['Year', 'Value', 'Indicator']]
# Pivot the data to get 'Value' for different indicators
pivot_table = filtered_data.pivot_table(index='Year', columns='Indicator', values='Value')
# Calculate descriptive statistics
descriptive_stats = pivot_table.describe()
# Display the table in a Jupyter notebook
%pip install Jinja2
descriptive_stats.style.set_table_attributes("style='display:inline'").set_caption('Table 2: Descriptive Statistics')
# Save the descriptive statistics to an HTML file for better visualization
descriptive_stats.to_html("descriptive_statistics.html")
descriptive_stats
Collecting Jinja2 Downloading jinja2-3.1.4-py3-none-any.whl.metadata (2.6 kB) Requirement already satisfied: MarkupSafe>=2.0 in ./.conda/lib/python3.12/site-packages (from Jinja2) (2.1.5) Downloading jinja2-3.1.4-py3-none-any.whl (133 kB) ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 133.3/133.3 kB 222.9 kB/s eta 0:00:00a 0:00:01 Installing collected packages: Jinja2 Successfully installed Jinja2-3.1.4 Note: you may need to restart the kernel to use updated packages.
Out[ ]:
| Indicator | (Increase)/decrease in inventories | (Increase)/decrease in prepaid expenses | (Increase)/decrease in receivables | (Reversal of provisions)/provisions | 1. Payment for fixed assets, constructions and other long-term assets | 1. Profit before tax | 1. Revenue | 10. General and administrative expenses | 11. Operating profit | 12. Other income | ... | - Common stock with voting right | - Cost | - Undistributed earnings in this period | 1. Owner's capital | 1. Tangible fixed assets | 11. Undistributed earnings after tax | 2. Financial leased fixed assets | 3. Intangible fixed assets | - Accumulated depreciation | - Cost |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| count | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | ... | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.00000 | 3.000000 |
| mean | -21.819444 | 7.809524 | 10.527778 | -22.211111 | -78.939815 | 818.037037 | 4905.888889 | 155.888889 | 799.222222 | 26.134259 | ... | 1079.111111 | 45.833333 | 601.703704 | 1079.111111 | 724.148148 | 1769.074074 | 169.000000 | 140.285714 | -889.55000 | 1283.483333 |
| std | 96.052161 | 18.945455 | 106.480667 | 42.883956 | 21.378543 | 131.110374 | 645.287255 | 15.456430 | 126.550434 | 17.870636 | ... | 0.000000 | 29.954479 | 107.695348 | 0.000000 | 72.488171 | 170.680567 | 8.185353 | 10.776960 | 55.03299 | 23.239101 |
| min | -116.125000 | -5.500000 | -95.625000 | -59.800000 | -103.625000 | 666.666667 | 4367.888889 | 139.666667 | 654.333333 | 13.777778 | ... | 1079.111111 | 23.000000 | 477.555556 | 1079.111111 | 664.666667 | 1660.111111 | 162.000000 | 128.000000 | -943.45000 | 1264.500000 |
| 25% | -70.673611 | -3.035714 | -42.875000 | -45.566667 | -85.187500 | 779.055556 | 4548.166667 | 148.611111 | 754.777778 | 15.888889 | ... | 1079.111111 | 28.875000 | 567.555556 | 1079.111111 | 683.777778 | 1670.722222 | 164.500000 | 136.357143 | -917.60000 | 1270.525000 |
| 50% | -25.222222 | -0.571429 | 9.875000 | -31.333333 | -66.750000 | 891.444444 | 4728.444444 | 157.555556 | 855.222222 | 18.000000 | ... | 1079.111111 | 34.750000 | 657.555556 | 1079.111111 | 702.888889 | 1681.333333 | 167.000000 | 144.714286 | -891.75000 | 1276.550000 |
| 75% | 25.333333 | 14.464286 | 63.604167 | -3.416667 | -66.597222 | 893.722222 | 5174.888889 | 164.000000 | 871.666667 | 32.312500 | ... | 1079.111111 | 57.250000 | 663.777778 | 1079.111111 | 753.888889 | 1823.555556 | 172.500000 | 146.428571 | -862.60000 | 1292.975000 |
| max | 75.888889 | 29.500000 | 117.333333 | 24.500000 | -66.444444 | 896.000000 | 5621.333333 | 170.444444 | 888.111111 | 46.625000 | ... | 1079.111111 | 79.750000 | 670.000000 | 1079.111111 | 804.888889 | 1965.777778 | 178.000000 | 148.142857 | -833.45000 | 1309.400000 |
8 rows × 222 columns
In [ ]:
# Filter necessary columns
filtered_data = beverage_data[['Indicator', 'Year', 'Value']]
# Pivot the data to get 'Value' for different indicators
pivot_table = filtered_data.pivot_table(index='Year', columns='Indicator', values='Value')
# Calculate descriptive statistics
descriptive_stats = pivot_table.describe()
# Display the descriptive statistics table
descriptive_stats
Out[ ]:
| Indicator | (Increase)/decrease in inventories | (Increase)/decrease in prepaid expenses | (Increase)/decrease in receivables | (Reversal of provisions)/provisions | 1. Payment for fixed assets, constructions and other long-term assets | 1. Profit before tax | 1. Revenue | 10. General and administrative expenses | 11. Operating profit | 12. Other income | ... | - Common stock with voting right | - Cost | - Undistributed earnings in this period | 1. Owner's capital | 1. Tangible fixed assets | 11. Undistributed earnings after tax | 2. Financial leased fixed assets | 3. Intangible fixed assets | - Accumulated depreciation | - Cost |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| count | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | ... | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.000000 | 3.00000 | 3.000000 |
| mean | -21.819444 | 7.809524 | 10.527778 | -22.211111 | -78.939815 | 818.037037 | 4905.888889 | 155.888889 | 799.222222 | 26.134259 | ... | 1079.111111 | 45.833333 | 601.703704 | 1079.111111 | 724.148148 | 1769.074074 | 169.000000 | 140.285714 | -889.55000 | 1283.483333 |
| std | 96.052161 | 18.945455 | 106.480667 | 42.883956 | 21.378543 | 131.110374 | 645.287255 | 15.456430 | 126.550434 | 17.870636 | ... | 0.000000 | 29.954479 | 107.695348 | 0.000000 | 72.488171 | 170.680567 | 8.185353 | 10.776960 | 55.03299 | 23.239101 |
| min | -116.125000 | -5.500000 | -95.625000 | -59.800000 | -103.625000 | 666.666667 | 4367.888889 | 139.666667 | 654.333333 | 13.777778 | ... | 1079.111111 | 23.000000 | 477.555556 | 1079.111111 | 664.666667 | 1660.111111 | 162.000000 | 128.000000 | -943.45000 | 1264.500000 |
| 25% | -70.673611 | -3.035714 | -42.875000 | -45.566667 | -85.187500 | 779.055556 | 4548.166667 | 148.611111 | 754.777778 | 15.888889 | ... | 1079.111111 | 28.875000 | 567.555556 | 1079.111111 | 683.777778 | 1670.722222 | 164.500000 | 136.357143 | -917.60000 | 1270.525000 |
| 50% | -25.222222 | -0.571429 | 9.875000 | -31.333333 | -66.750000 | 891.444444 | 4728.444444 | 157.555556 | 855.222222 | 18.000000 | ... | 1079.111111 | 34.750000 | 657.555556 | 1079.111111 | 702.888889 | 1681.333333 | 167.000000 | 144.714286 | -891.75000 | 1276.550000 |
| 75% | 25.333333 | 14.464286 | 63.604167 | -3.416667 | -66.597222 | 893.722222 | 5174.888889 | 164.000000 | 871.666667 | 32.312500 | ... | 1079.111111 | 57.250000 | 663.777778 | 1079.111111 | 753.888889 | 1823.555556 | 172.500000 | 146.428571 | -862.60000 | 1292.975000 |
| max | 75.888889 | 29.500000 | 117.333333 | 24.500000 | -66.444444 | 896.000000 | 5621.333333 | 170.444444 | 888.111111 | 46.625000 | ... | 1079.111111 | 79.750000 | 670.000000 | 1079.111111 | 804.888889 | 1965.777778 | 178.000000 | 148.142857 | -833.45000 | 1309.400000 |
8 rows × 222 columns
In [ ]:
# List unique indicators
unique_indicators = beverage_data['Indicator'].unique()
print(unique_indicators)
['\xa0\xa0\xa0\xa0\xa0- Accumulated depreciation' '\xa0\xa0\xa0\xa0\xa0- Accumulated retained earning at the end of the previous period' '\xa0\xa0\xa0\xa0\xa0- Common stock with voting right' '\xa0\xa0\xa0\xa0\xa0- Cost' '\xa0\xa0\xa0\xa0\xa0- Undistributed earnings in this period' '\xa0\xa0\xa0\xa0\xa0\xa0- Accumulated depreciation' '\xa0\xa0\xa0\xa0\xa0\xa0- Cost' "\xa0\xa0\xa0\xa0\xa01. Owner's capital" '\xa0\xa0\xa0\xa0\xa01. Tangible fixed assets' '\xa0\xa0\xa0\xa0\xa011. Undistributed earnings after tax' '\xa0\xa0\xa0\xa0\xa02. Financial leased fixed assets' '\xa0\xa0\xa0\xa0\xa03. Intangible fixed assets' '\xa0\xa0\xa0\xa01. Cash' '\xa0\xa0\xa0\xa01. Inventories' '\xa0\xa0\xa0\xa01. Long-term prepayments' '\xa0\xa0\xa0\xa01. Long-term trade payables' '\xa0\xa0\xa0\xa01. Long-term trade receivables' '\xa0\xa0\xa0\xa01. Short-term prepayments' '\xa0\xa0\xa0\xa01. Short-term trade accounts payable' '\xa0\xa0\xa0\xa01. Short-term trade accounts receivable' '\xa0\xa0\xa0\xa01. Subsidized not-for-profit funds' "\xa0\xa0\xa0\xa010. Other funds from owner's equity" '\xa0\xa0\xa0\xa010. Short-term borrowings and financial leases' '\xa0\xa0\xa0\xa011. Deferred income tax liabilities' '\xa0\xa0\xa0\xa011. Provision for short-term liabilities' '\xa0\xa0\xa0\xa012. Provision for long-term liabilities' '\xa0\xa0\xa0\xa012.. Bonus and welfare fund' '\xa0\xa0\xa0\xa013. Fund for technology development' "\xa0\xa0\xa0\xa013. Minority's interest" '\xa0\xa0\xa0\xa02. Cash equivalents' '\xa0\xa0\xa0\xa02. Construction in progress' '\xa0\xa0\xa0\xa02. Deferred income tax assets' '\xa0\xa0\xa0\xa02. Funds invested in fixed assets' '\xa0\xa0\xa0\xa02. Investments in associates, joint-ventures' '\xa0\xa0\xa0\xa02. Long-term prepayments to suppliers' '\xa0\xa0\xa0\xa02. Provision for decline in value of inventories' '\xa0\xa0\xa0\xa02. Share premium' '\xa0\xa0\xa0\xa02. Short-term advances from customers' '\xa0\xa0\xa0\xa02. Short-term prepayments to suppliers' '\xa0\xa0\xa0\xa02. Value added tax to be reclaimed' '\xa0\xa0\xa0\xa03. Held to maturity investments' '\xa0\xa0\xa0\xa03. Investments in other entities' '\xa0\xa0\xa0\xa03. Long-term equipment, supplies, spare parts' '\xa0\xa0\xa0\xa03. Taxes and other payables to state authorities' '\xa0\xa0\xa0\xa03. Taxes and other receivables from state authorities' '\xa0\xa0\xa0\xa04. Inter-company payables on business capital' '\xa0\xa0\xa0\xa04. Other capital of owners' '\xa0\xa0\xa0\xa04. Payable to employees' '\xa0\xa0\xa0\xa04. Provision for diminution in value of long-term investments' '\xa0\xa0\xa0\xa05. Held to maturity investments' '\xa0\xa0\xa0\xa05. Long-term loan receivables' '\xa0\xa0\xa0\xa05. Short-term acrrued expenses' '\xa0\xa0\xa0\xa05. Short-term loan receivables' '\xa0\xa0\xa0\xa05. Treasury shares' '\xa0\xa0\xa0\xa06. Other long-term receivables' '\xa0\xa0\xa0\xa06. Other short-term receivables' '\xa0\xa0\xa0\xa07. Foreign exchange differences' '\xa0\xa0\xa0\xa07. Other long-term liabilities' '\xa0\xa0\xa0\xa07. Provision for long-term doubtful debts' '\xa0\xa0\xa0\xa07. Provision for short-term doubtful debts (*)' '\xa0\xa0\xa0\xa08. Assets awaiting resolution' '\xa0\xa0\xa0\xa08. Investment and development fund' '\xa0\xa0\xa0\xa08. Long-term borrowings and financial leases' '\xa0\xa0\xa0\xa08. Short-term unearned revenue' '\xa0\xa0\xa0\xa09. Other short-term payables' '\xa0\xa0\xa0\xa0I. Cash and cash equivalents' '\xa0\xa0\xa0\xa0I. Long-term receivables' "\xa0\xa0\xa0\xa0I. Owner's equity" '\xa0\xa0\xa0\xa0I. Short -term liabilities' '\xa0\xa0\xa0\xa0II. Fixed assets' '\xa0\xa0\xa0\xa0II. Long-term liabilities' '\xa0\xa0\xa0\xa0II. Other resources and funds' '\xa0\xa0\xa0\xa0II. Short-term financial investments' '\xa0\xa0\xa0\xa0III. Investment properties' '\xa0\xa0\xa0\xa0III. Short-term receivables' '\xa0\xa0\xa0\xa0IV. Inventories' '\xa0\xa0\xa0\xa0IV. Long-term assets in progress' '\xa0\xa0\xa0\xa0V. Long-term financial investments' '\xa0\xa0\xa0\xa0V. Other short-term assets' '\xa0\xa0\xa0\xa0VI. Other long-term assets' '\xa0\xa0\xa0A. LIABILITIES' '\xa0\xa0\xa0A. SHORT-TERM ASSETS' '\xa0\xa0\xa0B. LONG-TERM ASSETS' "\xa0\xa0\xa0B. OWNER'S EQUITY" '\xa0\xa0\xa0VII. Goodwill' '\xa0TOTAL ASSETS' "\xa0TOTAL OWNER'S EQUITY AND LIABILITIES" '(Increase)/decrease in inventories' '(Increase)/decrease in prepaid expenses' '(Increase)/decrease in receivables' '(Reversal of provisions)/provisions' '1. Payment for fixed assets, constructions and other long-term assets' '1. Profit before tax' '1. Revenue' '10. General and administrative expenses' '11. Operating profit' '12. Other income' '13.Other expenses' '14. Other profit' '15. Profit before tax' '16. Current corporate income tax expenses' '17. Deferred income tax expenses (*)' '18. Net profit after tax' '19. Earnings per share' '2. Deduction from revenue' '2. Receipts from disposal of fixed assets and other long-term assets' '20. Diluted earnings per share' "3. Loans, purchases of other entities' debt instruments" '3. Net revenue' '3. Operating profit before changes in working capital' '3. Proceeds from borrowings' '4. Cost of goods sold' '4. Principal repayments' "4. Receipts from loan repayments, sale of other entities' debt instruments" '5. Gross profit' '5. Repayment of financial leases' '6. Collections on investment in other entities' '6. Dividends paid, profits distributed to owners' '6. Financial income' '7. Dividends, interest and profit received' '7. Financial expenses' "8. Share of associates and joint ventures' result" '9. Selling expenses' 'Accrual ratio (Balance sheet method)' 'Accrual ratio (Cash flow method)' 'Accrual ratio CF' 'Adjustments for' 'Beta' 'Book value per share (BVPS)' 'Cash and cash equivalents at beginning of the period' 'Cash and cash equivalents at end of the period' 'Cash flow per share (CPS)' 'Cash ratio' 'Cash return on equity' 'Cash return to assets' 'Cash to income' 'Cash/Short-term assets' 'Construction in progress/Fixed assets' 'Corporate income tax paid' 'Cost of goods sold/Net revenue' 'Days of inventory on hand' 'Days of sales outstanding' 'Debt coverage' 'Debt to assets' 'Debt to equity' 'Depreciation of fixed assets and properties investment' 'Dividend yield' 'EBIT margin' 'EBITDA/Net revenue' 'Equity to assets' 'Equity turnover' 'EV/EBIT' 'EV/EBITDA' 'Exchange difference due to re-valuation of ending balances' 'Finance lease/Fixed assets' 'Fixed asset turnover' 'Fixed assets/Total assets' 'Foreign exchange (gain)/loss from revaluation of monetary items denominated in foreign currencies' 'General and Administrative expenses/Net revenue' 'Gross profit' 'Gross profit margin' 'Increase/(decrease) in payables (other than interest, corporate income tax)' 'Intangible fixed assets/Fixed assets' 'Interest coverage' 'Interest expense' 'Interest expenses/Net revenue' 'Interest paid' 'Inventory turnover' 'Inventory/Short-term assets' 'Liabilities' 'Liabilities to assets' 'Liabilities to equity' 'Long-term assets/Total assets' 'Long-term liabilities' 'Loss/(profit) from investment activities' "Minority's interest" 'Net cash flows during the period' 'Net cash flows from financing activities' 'Net cash flows from investing activities' 'Net cash flows from operating activities' 'Net cash flows/Short -term liabilities' 'Net profit margin' 'Net revenue' 'Number of days of payables' 'Of which: Interest expenses' 'Other payments for operating activities' 'Other receipts from operating activities' 'Other Short-term assets/Short-term assets' "Owner's equity" 'P/B' 'P/E' 'P/S' 'Payables turnover' 'Profit after tax for shareholders of parent company' 'Profit after tax for shareholders of the parent company' 'Profit before tax' 'Quick ratio' 'Quick ratio (except: Inventories, Short-term receivables - reference)' 'Ratios - BHN' 'Ratios - HAD' 'Ratios - HAT' 'Ratios - SAB' 'Ratios - SCD' 'Ratios - SMB' 'Ratios - THB' 'Ratios - VCF' 'Ratios - VDL' 'Receivables turnover' 'Return on capital employed (ROCE)' 'ROA' 'ROE' 'Selling expenses/Net revenue' 'Short-term assets/Total assets' 'Short-term investments/Short-term assets' 'Short-term liabilities to equity' 'Short-term liabilities to total liabilities' 'Short-term ratio' 'Short-term receivables/Short-term assets' 'Tangible fixed assets/Fixed assets' 'Total asset turnover' 'Total assets' 'Trailing EPS']
In [ ]:
# Select a subset of key indicators based on the provided data
key_indicators = [
'Gross profit',
'Total assets',
'Net revenue',
'Operating profit',
'Net profit after tax',
'Gross profit margin',
'ROA',
'ROE',
'Debt to equity'
]
# Filter the data for key indicators
filtered_data = beverage_data[beverage_data['Indicator'].isin(key_indicators)]
# Pivot the data to get 'Value' for different indicators
pivot_table = filtered_data.pivot_table(index='Year', columns='Indicator', values='Value')
# Calculate descriptive statistics for the key indicators
descriptive_stats = pivot_table.describe()
# Select only essential statistics (mean, std, min, max)
essential_stats = descriptive_stats.loc[['mean', 'std', 'min', 'max']]
# Display the essential descriptive statistics table
print(essential_stats)
Indicator Debt to equity Gross profit Gross profit margin Net revenue \ mean 32.792963 0.718889 19.847037 0.514444 std 30.034726 35.171263 1.410885 24.586093 min 5.582222 -25.656667 18.728889 -15.312222 max 65.020000 40.651111 21.432222 28.838889 Indicator ROA ROE Total assets mean 8.197778 11.774444 5.690000 std 2.605647 4.444736 7.506447 min 6.047778 8.327778 -1.448889 max 11.095556 16.791111 13.516667
In [ ]:
# Replace missing values with "Unknown"
beverage_data['Category'].fillna('Unknown', inplace=True)
beverage_data['Category 2'].fillna('Unknown', inplace=True)
# Ensure all values in 'Indicator' column are strings and strip leading/trailing whitespace
beverage_data['Indicator'] = beverage_data['Indicator'].astype(str).str.strip()
# Encode categorical columns
label_encoder = LabelEncoder()
non_numeric_columns = beverage_data.select_dtypes(include=['object']).columns
for column in non_numeric_columns:
beverage_data[column] = label_encoder.fit_transform(beverage_data[column])
# Normalize the 'Value' column
scaler = StandardScaler()
beverage_data['Value'] = scaler.fit_transform(beverage_data[['Value']])
# Display the first few rows of the preprocessed data
print(beverage_data.head())
# Display summary statistics
desc_stats = beverage_data.describe()
print(desc_stats)
Indicator Company Year Value Category Category 2
0 154 0 2020 -0.180585 0 21
1 154 0 2021 -0.180879 0 21
2 154 0 2022 -0.181173 0 21
3 154 3 2020 -0.186758 0 21
4 154 3 2021 -0.186464 0 21
Indicator Company Year Value Category \
count 4558.000000 4558.000000 4558.000000 4.558000e+03 4558.000000
mean 102.818780 3.969724 2020.985739 -6.235566e-18 2.412023
std 62.760246 2.566039 0.818654 1.000110e+00 1.621816
min 0.000000 0.000000 2020.000000 -6.833770e+00 0.000000
25% 49.000000 2.000000 2020.000000 -1.785277e-01 1.000000
50% 97.000000 4.000000 2021.000000 -1.751401e-01 3.000000
75% 154.000000 6.000000 2022.000000 -1.541201e-01 4.000000
max 219.000000 8.000000 2022.000000 1.821011e+01 6.000000
Category 2
count 4558.000000
mean 11.661913
std 7.359719
min 0.000000
25% 5.000000
50% 11.000000
75% 20.000000
max 22.000000
/var/folders/57/3tgq502n7y31r_7lgrpyp6cc0000gn/T/ipykernel_34070/478313903.py:2: FutureWarning: A value is trying to be set on a copy of a DataFrame or Series through chained assignment using an inplace method.
The behavior will change in pandas 3.0. This inplace method will never work because the intermediate object on which we are setting values always behaves as a copy.
For example, when doing 'df[col].method(value, inplace=True)', try using 'df.method({col: value}, inplace=True)' or df[col] = df[col].method(value) instead, to perform the operation inplace on the original object.
beverage_data['Category'].fillna('Unknown', inplace=True)
/var/folders/57/3tgq502n7y31r_7lgrpyp6cc0000gn/T/ipykernel_34070/478313903.py:3: FutureWarning: A value is trying to be set on a copy of a DataFrame or Series through chained assignment using an inplace method.
The behavior will change in pandas 3.0. This inplace method will never work because the intermediate object on which we are setting values always behaves as a copy.
For example, when doing 'df[col].method(value, inplace=True)', try using 'df.method({col: value}, inplace=True)' or df[col] = df[col].method(value) instead, to perform the operation inplace on the original object.
beverage_data['Category 2'].fillna('Unknown', inplace=True)